/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96954 2022-09-30 12:41:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


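/*
 * Usage sketch for the worker above (illustrative only; the handler shape
 * follows this file's conventions, and the iemAImpl_pxor_u64 helper name is
 * an assumption):
 *
 * @code
 * FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 * {
 *     IEMOP_MNEMONIC(pxor_Pq_Qq, "pxor Pq,Qq");
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
 * }
 * @endcode
 */

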
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


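/*
 * Usage sketch for the SSE2 worker above (illustrative only; the handler and
 * helper names are assumptions patterned on the MMX example earlier):
 *
 * @code
 * FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
 * {
 *     IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor Vx,Wx");
 *     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
 * }
 * @endcode
 */

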
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


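/*
 * The 32-bit zero-extending fetch above matches instructions that only
 * consume the low half of the source, e.g. punpcklbw.  A sketch of a handler
 * forwarding here (handler and helper names assumed, following this file's
 * conventions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 * {
 *     IEMOP_MNEMONIC(punpcklbw_Pq_Qd, "punpcklbw Pq,Qd");
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 * }
 * @endcode
 */

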
/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


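/*
 * Sketch of a handler for an upper-half interleave like unpckhps forwarding
 * to the HighHigh worker above (handler and helper names assumed, following
 * this file's conventions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
 * {
 *     IEMOP_MNEMONIC(unpckhps_Vx_Wx, "unpckhps Vx,Wx");
 *     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
 * }
 * @endcode
 */

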
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


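/*
 * The worker above routes the result through IEMSSERESULT so that
 * IEM_MC_STORE_SSE_RESULT and IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT
 * can commit the value and MXCSR updates before raising any unmasked SIMD
 * floating-point exception.  Sketch of a handler using it (handler and
 * helper names assumed):
 *
 * @code
 * FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 * {
 *     IEMOP_MNEMONIC(addps_Vps_Wps, "addps Vps,Wps");
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 * }
 * @endcode
 */

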
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * No alignment check on the 32-bit memory operand.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * No alignment check on the 64-bit memory operand.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


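/*
 * Sketch of an SSE3 horizontal-add handler forwarding to the worker above
 * (handler and helper names assumed, following this file's conventions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 * {
 *     IEMOP_MNEMONIC(haddps_Vps_Wps, "haddps Vps,Wps");
 *     return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 * }
 * @endcode
 */

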
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5. Common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


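/*
 * Worked decode example: for the byte sequence 0F 00 C8, bRm = 0xC8 =
 * 11.001.000b, so IEM_GET_MODRM_REG_8(bRm) yields 1 and the table above
 * dispatches to iemOp_Grp6_str in its register form (mod = 3, rm = 0,
 * i.e. 'str ax').
 */

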
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       low four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


1749/** Opcode 0x0f 0x01. */
1750FNIEMOP_DEF(iemOp_Grp7)
1751{
1752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1753 if (IEM_IS_MODRM_MEM_MODE(bRm))
1754 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1755
1756 switch (IEM_GET_MODRM_REG_8(bRm))
1757 {
1758 case 0:
1759 switch (IEM_GET_MODRM_RM_8(bRm))
1760 {
1761 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1762 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1763 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1764 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1765 }
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767
1768 case 1:
1769 switch (IEM_GET_MODRM_RM_8(bRm))
1770 {
1771 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1772 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1773 }
1774 return IEMOP_RAISE_INVALID_OPCODE();
1775
1776 case 2:
1777 switch (IEM_GET_MODRM_RM_8(bRm))
1778 {
1779 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1780 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1781 }
1782 return IEMOP_RAISE_INVALID_OPCODE();
1783
1784 case 3:
1785 switch (IEM_GET_MODRM_RM_8(bRm))
1786 {
1787 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1788 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1789 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1790 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1791 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1792 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1793 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1794 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1796 }
1797
1798 case 4:
1799 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1800
1801 case 5:
1802 return IEMOP_RAISE_INVALID_OPCODE();
1803
1804 case 6:
1805 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1806
1807 case 7:
1808 switch (IEM_GET_MODRM_RM_8(bRm))
1809 {
1810 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1811 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1812 }
1813 return IEMOP_RAISE_INVALID_OPCODE();
1814
1815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1816 }
1817}
1818
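/**
 * For reference, group 7 dispatching above splits the ModRM byte into its
 * three fields; a minimal sketch of that decomposition (helper name is
 * hypothetical, the decoder proper uses the IEM_GET_MODRM_* macros):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static void ModelSplitModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
 *  {
 *      *pMod = bRm >> 6;        // 11b selects the register forms handled inline above
 *      *pReg = (bRm >> 3) & 7;  // the /0../7 opcode extension for group 7
 *      *pRm  = bRm & 7;         // sub-opcode (register forms) or base register
 *  }
 * @endcode
 */
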
1819/** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03). */
1820FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1821{
1822 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1824
1825 if (IEM_IS_MODRM_REG_MODE(bRm))
1826 {
1827 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1828 switch (pVCpu->iem.s.enmEffOpSize)
1829 {
1830 case IEMMODE_16BIT:
1831 {
1832 IEM_MC_BEGIN(3, 0);
1833 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1834 IEM_MC_ARG(uint16_t, u16Sel, 1);
1835 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1836
1837 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1838 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1839 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1840
1841 IEM_MC_END();
1842 return VINF_SUCCESS;
1843 }
1844
1845 case IEMMODE_32BIT:
1846 case IEMMODE_64BIT:
1847 {
1848 IEM_MC_BEGIN(3, 0);
1849 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1850 IEM_MC_ARG(uint16_t, u16Sel, 1);
1851 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1852
1853 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1854 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1855 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1856
1857 IEM_MC_END();
1858 return VINF_SUCCESS;
1859 }
1860
1861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1862 }
1863 }
1864 else
1865 {
1866 switch (pVCpu->iem.s.enmEffOpSize)
1867 {
1868 case IEMMODE_16BIT:
1869 {
1870 IEM_MC_BEGIN(3, 1);
1871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1872 IEM_MC_ARG(uint16_t, u16Sel, 1);
1873 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1875
1876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1877 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1878
1879 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1882
1883 IEM_MC_END();
1884 return VINF_SUCCESS;
1885 }
1886
1887 case IEMMODE_32BIT:
1888 case IEMMODE_64BIT:
1889 {
1890 IEM_MC_BEGIN(3, 1);
1891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1892 IEM_MC_ARG(uint16_t, u16Sel, 1);
1893 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1895
1896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1897 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1898/** @todo testcase: make sure it's a 16-bit read. */
1899
1900 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1901 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1902 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1903
1904 IEM_MC_END();
1905 return VINF_SUCCESS;
1906 }
1907
1908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1909 }
1910 }
1911}
1912
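/**
 * For reference, the limit reported by LSL is the 20-bit descriptor limit
 * scaled by the granularity bit; a rough model of that expansion (helper is
 * hypothetical, the real work happens in iemCImpl_LarLsl_u16/u64):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint32_t ModelLslLimit(uint32_t uRawLimit20, int fGran4K)
 *  {
 *      return fGran4K ? (uRawLimit20 << 12) | UINT32_C(0xfff) : uRawLimit20;
 *  }
 * @endcode
 */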
1913
1914
1915/** Opcode 0x0f 0x02. */
1916FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1917{
1918 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1919 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1920}
1921
1922
1923/** Opcode 0x0f 0x03. */
1924FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1925{
1926 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1927 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1928}
1929
1930
1931/** Opcode 0x0f 0x05. */
1932FNIEMOP_DEF(iemOp_syscall)
1933{
1934 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1937}
1938
1939
1940/** Opcode 0x0f 0x06. */
1941FNIEMOP_DEF(iemOp_clts)
1942{
1943 IEMOP_MNEMONIC(clts, "clts");
1944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1945 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1946}
1947
1948
1949/** Opcode 0x0f 0x07. */
1950FNIEMOP_DEF(iemOp_sysret)
1951{
1952 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1954 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1955}
1956
1957
1958/** Opcode 0x0f 0x08. */
1959FNIEMOP_DEF(iemOp_invd)
1960{
1961 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1962 IEMOP_HLP_MIN_486();
1963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1964 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1965}
1966
1967
1968/** Opcode 0x0f 0x09. */
1969FNIEMOP_DEF(iemOp_wbinvd)
1970{
1971 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1972 IEMOP_HLP_MIN_486();
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1975}
1976
1977
1978/** Opcode 0x0f 0x0b. */
1979FNIEMOP_DEF(iemOp_ud2)
1980{
1981 IEMOP_MNEMONIC(ud2, "ud2");
1982 return IEMOP_RAISE_INVALID_OPCODE();
1983}
1984
1985/** Opcode 0x0f 0x0d. */
1986FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1987{
1988 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1989 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1990 {
1991 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1992 return IEMOP_RAISE_INVALID_OPCODE();
1993 }
1994
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1996 if (IEM_IS_MODRM_REG_MODE(bRm))
1997 {
1998 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1999 return IEMOP_RAISE_INVALID_OPCODE();
2000 }
2001
2002 switch (IEM_GET_MODRM_REG_8(bRm))
2003 {
2004 case 2: /* Aliased to /0 for the time being. */
2005 case 4: /* Aliased to /0 for the time being. */
2006 case 5: /* Aliased to /0 for the time being. */
2007 case 6: /* Aliased to /0 for the time being. */
2008 case 7: /* Aliased to /0 for the time being. */
2009 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2010 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2011 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2013 }
2014
2015 IEM_MC_BEGIN(0, 1);
2016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2019 /* Currently a NOP. */
2020 NOREF(GCPtrEffSrc);
2021 IEM_MC_ADVANCE_RIP();
2022 IEM_MC_END();
2023 return VINF_SUCCESS;
2024}
2025
2026
2027/** Opcode 0x0f 0x0e. */
2028FNIEMOP_DEF(iemOp_femms)
2029{
2030 IEMOP_MNEMONIC(femms, "femms");
2031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032
2033 IEM_MC_BEGIN(0,0);
2034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2036 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2037 IEM_MC_FPU_FROM_MMX_MODE();
2038 IEM_MC_ADVANCE_RIP();
2039 IEM_MC_END();
2040 return VINF_SUCCESS;
2041}
2042
2043
2044/** Opcode 0x0f 0x0f. */
2045FNIEMOP_DEF(iemOp_3Dnow)
2046{
2047 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2048 {
2049 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2050 return IEMOP_RAISE_INVALID_OPCODE();
2051 }
2052
2053#ifdef IEM_WITH_3DNOW
2054 /* This is pretty sparse, use switch instead of table. */
2055 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2056 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2057#else
2058 IEMOP_BITCH_ABOUT_STUB();
2059 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2060#endif
2061}
2062
2063
2064/**
2065 * @opcode 0x10
2066 * @oppfx none
2067 * @opcpuid sse
2068 * @opgroup og_sse_simdfp_datamove
2069 * @opxcpttype 4UA
2070 * @optest op1=1 op2=2 -> op1=2
2071 * @optest op1=0 op2=-22 -> op1=-22
2072 */
2073FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2074{
2075 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2077 if (IEM_IS_MODRM_REG_MODE(bRm))
2078 {
2079 /*
2080 * Register, register.
2081 */
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2086 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2087 IEM_GET_MODRM_RM(pVCpu, bRm));
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 else
2092 {
2093 /*
2094 * Register, memory.
2095 */
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2104
2105 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2106 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 return VINF_SUCCESS;
2113}
2114
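/**
 * MOVUPS tolerates unaligned operands, so the memory form above amounts to a
 * plain 16-byte copy; a stand-alone model (hypothetical helper):
 *
 * @code
 *  #include <string.h>
 *
 *  static void ModelMovupsLoad(unsigned char abXmmDst[16], const unsigned char *pbSrc)
 *  {
 *      memcpy(abXmmDst, pbSrc, 16); // no 16-byte alignment requirement, unlike MOVAPS
 *  }
 * @endcode
 */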
2115
2116/**
2117 * @opcode 0x10
2118 * @oppfx 0x66
2119 * @opcpuid sse2
2120 * @opgroup og_sse2_pcksclr_datamove
2121 * @opxcpttype 4UA
2122 * @optest op1=1 op2=2 -> op1=2
2123 * @optest op1=0 op2=-42 -> op1=-42
2124 */
2125FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2126{
2127 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2129 if (IEM_IS_MODRM_REG_MODE(bRm))
2130 {
2131 /*
2132 * Register, register.
2133 */
2134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2135 IEM_MC_BEGIN(0, 0);
2136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2138 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2139 IEM_GET_MODRM_RM(pVCpu, bRm));
2140 IEM_MC_ADVANCE_RIP();
2141 IEM_MC_END();
2142 }
2143 else
2144 {
2145 /*
2146 * Register, memory.
2147 */
2148 IEM_MC_BEGIN(0, 2);
2149 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2151
2152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2156
2157 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2158 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2159
2160 IEM_MC_ADVANCE_RIP();
2161 IEM_MC_END();
2162 }
2163 return VINF_SUCCESS;
2164}
2165
2166
2167/**
2168 * @opcode 0x10
2169 * @oppfx 0xf3
2170 * @opcpuid sse
2171 * @opgroup og_sse_simdfp_datamove
2172 * @opxcpttype 5
2173 * @optest op1=1 op2=2 -> op1=2
2174 * @optest op1=0 op2=-22 -> op1=-22
2175 */
2176FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2177{
2178 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 if (IEM_IS_MODRM_REG_MODE(bRm))
2181 {
2182 /*
2183 * Register, register.
2184 */
2185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2186 IEM_MC_BEGIN(0, 1);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2192 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2193
2194 IEM_MC_ADVANCE_RIP();
2195 IEM_MC_END();
2196 }
2197 else
2198 {
2199 /*
2200 * Register, memory.
2201 */
2202 IEM_MC_BEGIN(0, 2);
2203 IEM_MC_LOCAL(uint32_t, uSrc);
2204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2205
2206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2208 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2209 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2210
2211 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2212 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2213
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 return VINF_SUCCESS;
2218}
2219
2220
2221/**
2222 * @opcode 0x10
2223 * @oppfx 0xf2
2224 * @opcpuid sse2
2225 * @opgroup og_sse2_pcksclr_datamove
2226 * @opxcpttype 5
2227 * @optest op1=1 op2=2 -> op1=2
2228 * @optest op1=0 op2=-42 -> op1=-42
2229 */
2230FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2231{
2232 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 if (IEM_IS_MODRM_REG_MODE(bRm))
2235 {
2236 /*
2237 * Register, register.
2238 */
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_BEGIN(0, 1);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242
2243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2244 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2245 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP();
2249 IEM_MC_END();
2250 }
2251 else
2252 {
2253 /*
2254 * Register, memory.
2255 */
2256 IEM_MC_BEGIN(0, 2);
2257 IEM_MC_LOCAL(uint64_t, uSrc);
2258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2259
2260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2264
2265 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2266 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2267
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 return VINF_SUCCESS;
2272}
2273
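/**
 * Note the asymmetry modelled above for the MOVSS/MOVSD loads: the register
 * form merges into the low element only, the memory form zero-extends to the
 * full 128 bits.  A rough sketch (types and helper are hypothetical):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint64_t au64[2]; } MODELXMMREG;
 *
 *  static void ModelMovsdLoad(MODELXMMREG *pDst, const MODELXMMREG *pSrcReg, const uint64_t *pu64Mem)
 *  {
 *      if (pSrcReg)
 *          pDst->au64[0] = pSrcReg->au64[0]; // reg form: high quadword unchanged
 *      else
 *      {
 *          pDst->au64[0] = *pu64Mem;         // mem form: zero-extended to 128 bits
 *          pDst->au64[1] = 0;
 *      }
 *  }
 * @endcode
 */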
2274
2275/**
2276 * @opcode 0x11
2277 * @oppfx none
2278 * @opcpuid sse
2279 * @opgroup og_sse_simdfp_datamove
2280 * @opxcpttype 4UA
2281 * @optest op1=1 op2=2 -> op1=2
2282 * @optest op1=0 op2=-42 -> op1=-42
2283 */
2284FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2285{
2286 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2288 if (IEM_IS_MODRM_REG_MODE(bRm))
2289 {
2290 /*
2291 * Register, register.
2292 */
2293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2294 IEM_MC_BEGIN(0, 0);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2298 IEM_GET_MODRM_REG(pVCpu, bRm));
2299 IEM_MC_ADVANCE_RIP();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 /*
2305 * Memory, register.
2306 */
2307 IEM_MC_BEGIN(0, 2);
2308 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2310
2311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2315
2316 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2318
2319 IEM_MC_ADVANCE_RIP();
2320 IEM_MC_END();
2321 }
2322 return VINF_SUCCESS;
2323}
2324
2325
2326/**
2327 * @opcode 0x11
2328 * @oppfx 0x66
2329 * @opcpuid sse2
2330 * @opgroup og_sse2_pcksclr_datamove
2331 * @opxcpttype 4UA
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
2335FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2336{
2337 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2339 if (IEM_IS_MODRM_REG_MODE(bRm))
2340 {
2341 /*
2342 * Register, register.
2343 */
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2345 IEM_MC_BEGIN(0, 0);
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2349 IEM_GET_MODRM_REG(pVCpu, bRm));
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 else
2354 {
2355 /*
2356 * Memory, register.
2357 */
2358 IEM_MC_BEGIN(0, 2);
2359 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2361
2362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2366
2367 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2368 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/**
2378 * @opcode 0x11
2379 * @oppfx 0xf3
2380 * @opcpuid sse
2381 * @opgroup og_sse_simdfp_datamove
2382 * @opxcpttype 5
2383 * @optest op1=1 op2=2 -> op1=2
2384 * @optest op1=0 op2=-22 -> op1=-22
2385 */
2386FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2387{
2388 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2390 if (IEM_IS_MODRM_REG_MODE(bRm))
2391 {
2392 /*
2393 * Register, register.
2394 */
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEM_MC_BEGIN(0, 1);
2397 IEM_MC_LOCAL(uint32_t, uSrc);
2398
2399 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2400 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2401 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2402 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2403
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Memory, register.
2411 */
2412 IEM_MC_BEGIN(0, 2);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2420
2421 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x11
2433 * @oppfx 0xf2
2434 * @opcpuid sse2
2435 * @opgroup og_sse2_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=1 op2=2 -> op1=2
2438 * @optest op1=0 op2=-42 -> op1=-42
2439 */
2440FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2441{
2442 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(0, 1);
2451 IEM_MC_LOCAL(uint64_t, uSrc);
2452
2453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2455 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2456 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 else
2462 {
2463 /*
2464 * Memory, register.
2465 */
2466 IEM_MC_BEGIN(0, 2);
2467 IEM_MC_LOCAL(uint64_t, uSrc);
2468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2469
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2474
2475 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2476 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2477
2478 IEM_MC_ADVANCE_RIP();
2479 IEM_MC_END();
2480 }
2481 return VINF_SUCCESS;
2482}
2483
2484
2485FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2486{
2487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2488 if (IEM_IS_MODRM_REG_MODE(bRm))
2489 {
2490 /**
2491 * @opcode 0x12
2492 * @opcodesub 11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2501
2502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503 IEM_MC_BEGIN(0, 1);
2504 IEM_MC_LOCAL(uint64_t, uSrc);
2505
2506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2508 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2509 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2510
2511 IEM_MC_ADVANCE_RIP();
2512 IEM_MC_END();
2513 }
2514 else
2515 {
2516 /**
2517 * @opdone
2518 * @opcode 0x12
2519 * @opcodesub !11 mr/reg
2520 * @oppfx none
2521 * @opcpuid sse
2522 * @opgroup og_sse_simdfp_datamove
2523 * @opxcpttype 5
2524 * @optest op1=1 op2=2 -> op1=2
2525 * @optest op1=0 op2=-42 -> op1=-42
2526 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2527 */
2528 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 2);
2531 IEM_MC_LOCAL(uint64_t, uSrc);
2532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2533
2534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2538
2539 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2540 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 return VINF_SUCCESS;
2546}
2547
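/**
 * The register form above (MOVHLPS) moves the high quadword of the source
 * into the low quadword of the destination; minimal model (hypothetical):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint64_t au64[2]; } MODELXMMREG;
 *
 *  static void ModelMovhlps(MODELXMMREG *pDst, const MODELXMMREG *pSrc)
 *  {
 *      pDst->au64[0] = pSrc->au64[1]; // high quadword of pDst is left unchanged
 *  }
 * @endcode
 */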
2548
2549/**
2550 * @opcode 0x12
2551 * @opcodesub !11 mr/reg
2552 * @oppfx 0x66
2553 * @opcpuid sse2
2554 * @opgroup og_sse2_pcksclr_datamove
2555 * @opxcpttype 5
2556 * @optest op1=1 op2=2 -> op1=2
2557 * @optest op1=0 op2=-42 -> op1=-42
2558 */
2559FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2560{
2561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2562 if (IEM_IS_MODRM_MEM_MODE(bRm))
2563 {
2564 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2565
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(uint64_t, uSrc);
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 return VINF_SUCCESS;
2581 }
2582
2583 /**
2584 * @opdone
2585 * @opmnemonic ud660f12m3
2586 * @opcode 0x12
2587 * @opcodesub 11 mr/reg
2588 * @oppfx 0x66
2589 * @opunused immediate
2590 * @opcpuid sse
2591 * @optest ->
2592 */
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594}
2595
2596
2597/**
2598 * @opcode 0x12
2599 * @oppfx 0xf3
2600 * @opcpuid sse3
2601 * @opgroup og_sse3_pcksclr_datamove
2602 * @opxcpttype 4
2603 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2604 * op1=0x00000002000000020000000100000001
2605 */
2606FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2607{
2608 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2610 if (IEM_IS_MODRM_REG_MODE(bRm))
2611 {
2612 /*
2613 * Register, register.
2614 */
2615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616 IEM_MC_BEGIN(2, 0);
2617 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2619
2620 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2621 IEM_MC_PREPARE_SSE_USAGE();
2622
2623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2624 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2625 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2626
2627 IEM_MC_ADVANCE_RIP();
2628 IEM_MC_END();
2629 }
2630 else
2631 {
2632 /*
2633 * Register, memory.
2634 */
2635 IEM_MC_BEGIN(2, 2);
2636 IEM_MC_LOCAL(RTUINT128U, uSrc);
2637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2638 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2639 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2640
2641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2643 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2644 IEM_MC_PREPARE_SSE_USAGE();
2645
2646 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2647 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2648 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2649
2650 IEM_MC_ADVANCE_RIP();
2651 IEM_MC_END();
2652 }
2653 return VINF_SUCCESS;
2654}
2655
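/**
 * MOVSLDUP duplicates the even dwords of the source, matching the @optest
 * vectors above; a sketch (hypothetical types/helper):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint32_t au32[4]; } MODELXMMREG;
 *
 *  static void ModelMovsldup(MODELXMMREG *pDst, const MODELXMMREG *pSrc)
 *  {
 *      pDst->au32[0] = pDst->au32[1] = pSrc->au32[0];
 *      pDst->au32[2] = pDst->au32[3] = pSrc->au32[2];
 *  }
 * @endcode
 */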
2656
2657/**
2658 * @opcode 0x12
2659 * @oppfx 0xf2
2660 * @opcpuid sse3
2661 * @opgroup og_sse3_pcksclr_datamove
2662 * @opxcpttype 5
2663 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2664 * op1=0x22222222111111112222222211111111
2665 */
2666FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2667{
2668 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2670 if (IEM_IS_MODRM_REG_MODE(bRm))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(2, 0);
2677 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2678 IEM_MC_ARG(uint64_t, uSrc, 1);
2679
2680 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2681 IEM_MC_PREPARE_SSE_USAGE();
2682
2683 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2684 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2685 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2686
2687 IEM_MC_ADVANCE_RIP();
2688 IEM_MC_END();
2689 }
2690 else
2691 {
2692 /*
2693 * Register, memory.
2694 */
2695 IEM_MC_BEGIN(2, 2);
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2697 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2698 IEM_MC_ARG(uint64_t, uSrc, 1);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2703 IEM_MC_PREPARE_SSE_USAGE();
2704
2705 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2706 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2707 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2708
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 return VINF_SUCCESS;
2713}
2714
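/**
 * MOVDDUP broadcasts the low quadword, which is why the memory form above
 * only fetches 64 bits; a sketch (hypothetical types/helper):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint64_t au64[2]; } MODELXMMREG;
 *
 *  static void ModelMovddup(MODELXMMREG *pDst, uint64_t uSrcLo)
 *  {
 *      pDst->au64[0] = uSrcLo;
 *      pDst->au64[1] = uSrcLo;
 *  }
 * @endcode
 */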
2715
2716/**
2717 * @opcode 0x13
2718 * @opcodesub !11 mr/reg
2719 * @oppfx none
2720 * @opcpuid sse
2721 * @opgroup og_sse_simdfp_datamove
2722 * @opxcpttype 5
2723 * @optest op1=1 op2=2 -> op1=2
2724 * @optest op1=0 op2=-42 -> op1=-42
2725 */
2726FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2727{
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if (IEM_IS_MODRM_MEM_MODE(bRm))
2730 {
2731 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2732
2733 IEM_MC_BEGIN(0, 2);
2734 IEM_MC_LOCAL(uint64_t, uSrc);
2735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2736
2737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2739 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2740 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2741
2742 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2744
2745 IEM_MC_ADVANCE_RIP();
2746 IEM_MC_END();
2747 return VINF_SUCCESS;
2748 }
2749
2750 /**
2751 * @opdone
2752 * @opmnemonic ud0f13m3
2753 * @opcode 0x13
2754 * @opcodesub 11 mr/reg
2755 * @oppfx none
2756 * @opunused immediate
2757 * @opcpuid sse
2758 * @optest ->
2759 */
2760 return IEMOP_RAISE_INVALID_OPCODE();
2761}
2762
2763
2764/**
2765 * @opcode 0x13
2766 * @opcodesub !11 mr/reg
2767 * @oppfx 0x66
2768 * @opcpuid sse2
2769 * @opgroup og_sse2_pcksclr_datamove
2770 * @opxcpttype 5
2771 * @optest op1=1 op2=2 -> op1=2
2772 * @optest op1=0 op2=-42 -> op1=-42
2773 */
2774FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 if (IEM_IS_MODRM_MEM_MODE(bRm))
2778 {
2779 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2780 IEM_MC_BEGIN(0, 2);
2781 IEM_MC_LOCAL(uint64_t, uSrc);
2782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2783
2784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2788
2789 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2790 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2791
2792 IEM_MC_ADVANCE_RIP();
2793 IEM_MC_END();
2794 return VINF_SUCCESS;
2795 }
2796
2797 /**
2798 * @opdone
2799 * @opmnemonic ud660f13m3
2800 * @opcode 0x13
2801 * @opcodesub 11 mr/reg
2802 * @oppfx 0x66
2803 * @opunused immediate
2804 * @opcpuid sse
2805 * @optest ->
2806 */
2807 return IEMOP_RAISE_INVALID_OPCODE();
2808}
2809
2810
2811/**
2812 * @opmnemonic udf30f13
2813 * @opcode 0x13
2814 * @oppfx 0xf3
2815 * @opunused intel-modrm
2816 * @opcpuid sse
2817 * @optest ->
2818 * @opdone
2819 */
2820
2821/**
2822 * @opmnemonic udf20f13
2823 * @opcode 0x13
2824 * @oppfx 0xf2
2825 * @opunused intel-modrm
2826 * @opcpuid sse
2827 * @optest ->
2828 * @opdone
2829 */
2830
2831/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2832FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2833{
2834 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2836}
2837
2838
2839/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2840FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2841{
2842 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2843 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2844}
2845
2846
2847/**
2848 * @opdone
2849 * @opmnemonic udf30f14
2850 * @opcode 0x14
2851 * @oppfx 0xf3
2852 * @opunused intel-modrm
2853 * @opcpuid sse
2854 * @optest ->
2855 * @opdone
2856 */
2857
2858/**
2859 * @opmnemonic udf20f14
2860 * @opcode 0x14
2861 * @oppfx 0xf2
2862 * @opunused intel-modrm
2863 * @opcpuid sse
2864 * @optest ->
2865 * @opdone
2866 */
2867
2868/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2869FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2870{
2871 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2872 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2873}
2874
2875
2876/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2877FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2878{
2879 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2880 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2881}
2882
2883
2884/* Opcode 0xf3 0x0f 0x15 - invalid */
2885/* Opcode 0xf2 0x0f 0x15 - invalid */
2886
2887/**
2888 * @opdone
2889 * @opmnemonic udf30f15
2890 * @opcode 0x15
2891 * @oppfx 0xf3
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/**
2899 * @opmnemonic udf20f15
2900 * @opcode 0x15
2901 * @oppfx 0xf2
2902 * @opunused intel-modrm
2903 * @opcpuid sse
2904 * @optest ->
2905 * @opdone
2906 */
2907
2908FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2909{
2910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2911 if (IEM_IS_MODRM_REG_MODE(bRm))
2912 {
2913 /**
2914 * @opcode 0x16
2915 * @opcodesub 11 mr/reg
2916 * @oppfx none
2917 * @opcpuid sse
2918 * @opgroup og_sse_simdfp_datamove
2919 * @opxcpttype 5
2920 * @optest op1=1 op2=2 -> op1=2
2921 * @optest op1=0 op2=-42 -> op1=-42
2922 */
2923 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2924
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEM_MC_BEGIN(0, 1);
2927 IEM_MC_LOCAL(uint64_t, uSrc);
2928
2929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2930 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2931 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2932 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2933
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /**
2940 * @opdone
2941 * @opcode 0x16
2942 * @opcodesub !11 mr/reg
2943 * @oppfx none
2944 * @opcpuid sse
2945 * @opgroup og_sse_simdfp_datamove
2946 * @opxcpttype 5
2947 * @optest op1=1 op2=2 -> op1=2
2948 * @optest op1=0 op2=-42 -> op1=-42
2949 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2950 */
2951 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2952
2953 IEM_MC_BEGIN(0, 2);
2954 IEM_MC_LOCAL(uint64_t, uSrc);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2960 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2961
2962 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2963 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2964
2965 IEM_MC_ADVANCE_RIP();
2966 IEM_MC_END();
2967 }
2968 return VINF_SUCCESS;
2969}
2970
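/**
 * MOVLHPS (the register form above) mirrors MOVHLPS: the low quadword of the
 * source lands in the high quadword of the destination (hypothetical model):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint64_t au64[2]; } MODELXMMREG;
 *
 *  static void ModelMovlhps(MODELXMMREG *pDst, const MODELXMMREG *pSrc)
 *  {
 *      pDst->au64[1] = pSrc->au64[0]; // low quadword of pDst is left unchanged
 *  }
 * @endcode
 */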
2971
2972/**
2973 * @opcode 0x16
2974 * @opcodesub !11 mr/reg
2975 * @oppfx 0x66
2976 * @opcpuid sse2
2977 * @opgroup og_sse2_pcksclr_datamove
2978 * @opxcpttype 5
2979 * @optest op1=1 op2=2 -> op1=2
2980 * @optest op1=0 op2=-42 -> op1=-42
2981 */
2982FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if (IEM_IS_MODRM_MEM_MODE(bRm))
2986 {
2987 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2988 IEM_MC_BEGIN(0, 2);
2989 IEM_MC_LOCAL(uint64_t, uSrc);
2990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2991
2992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2996
2997 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2998 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2999
3000 IEM_MC_ADVANCE_RIP();
3001 IEM_MC_END();
3002 return VINF_SUCCESS;
3003 }
3004
3005 /**
3006 * @opdone
3007 * @opmnemonic ud660f16m3
3008 * @opcode 0x16
3009 * @opcodesub 11 mr/reg
3010 * @oppfx 0x66
3011 * @opunused immediate
3012 * @opcpuid sse
3013 * @optest ->
3014 */
3015 return IEMOP_RAISE_INVALID_OPCODE();
3016}
3017
3018
3019/**
3020 * @opcode 0x16
3021 * @oppfx 0xf3
3022 * @opcpuid sse3
3023 * @opgroup og_sse3_pcksclr_datamove
3024 * @opxcpttype 4
3025 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3026 * op1=0x00000002000000020000000100000001
3027 */
3028FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3029{
3030 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 if (IEM_IS_MODRM_REG_MODE(bRm))
3033 {
3034 /*
3035 * Register, register.
3036 */
3037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3038 IEM_MC_BEGIN(2, 0);
3039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3040 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3041
3042 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3043 IEM_MC_PREPARE_SSE_USAGE();
3044
3045 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3046 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3047 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3048
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 else
3053 {
3054 /*
3055 * Register, memory.
3056 */
3057 IEM_MC_BEGIN(2, 2);
3058 IEM_MC_LOCAL(RTUINT128U, uSrc);
3059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3060 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3061 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3062
3063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3066 IEM_MC_PREPARE_SSE_USAGE();
3067
3068 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3069 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3071
3072 IEM_MC_ADVANCE_RIP();
3073 IEM_MC_END();
3074 }
3075 return VINF_SUCCESS;
3076}
3077
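/**
 * MOVSHDUP is the odd-dword counterpart of MOVSLDUP, again matching the
 * @optest vectors above (hypothetical types/helper):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint32_t au32[4]; } MODELXMMREG;
 *
 *  static void ModelMovshdup(MODELXMMREG *pDst, const MODELXMMREG *pSrc)
 *  {
 *      pDst->au32[0] = pDst->au32[1] = pSrc->au32[1];
 *      pDst->au32[2] = pDst->au32[3] = pSrc->au32[3];
 *  }
 * @endcode
 */
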
3078/**
3079 * @opdone
3080 * @opmnemonic udf20f16
3081 * @opcode 0x16
3082 * @oppfx 0xf2
3083 * @opunused intel-modrm
3084 * @opcpuid sse
3085 * @optest ->
3086 * @opdone
3087 */
3088
3089
3090/**
3091 * @opcode 0x17
3092 * @opcodesub !11 mr/reg
3093 * @oppfx none
3094 * @opcpuid sse
3095 * @opgroup og_sse_simdfp_datamove
3096 * @opxcpttype 5
3097 * @optest op1=1 op2=2 -> op1=2
3098 * @optest op1=0 op2=-42 -> op1=-42
3099 */
3100FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3101{
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 if (IEM_IS_MODRM_MEM_MODE(bRm))
3104 {
3105 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3106
3107 IEM_MC_BEGIN(0, 2);
3108 IEM_MC_LOCAL(uint64_t, uSrc);
3109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3110
3111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3115
3116 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3118
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 return VINF_SUCCESS;
3122 }
3123
3124 /**
3125 * @opdone
3126 * @opmnemonic ud0f17m3
3127 * @opcode 0x17
3128 * @opcodesub 11 mr/reg
3129 * @oppfx none
3130 * @opunused immediate
3131 * @opcpuid sse
3132 * @optest ->
3133 */
3134 return IEMOP_RAISE_INVALID_OPCODE();
3135}
3136
3137
3138/**
3139 * @opcode 0x17
3140 * @opcodesub !11 mr/reg
3141 * @oppfx 0x66
3142 * @opcpuid sse2
3143 * @opgroup og_sse2_pcksclr_datamove
3144 * @opxcpttype 5
3145 * @optest op1=1 op2=2 -> op1=2
3146 * @optest op1=0 op2=-42 -> op1=-42
3147 */
3148FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (IEM_IS_MODRM_MEM_MODE(bRm))
3152 {
3153 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3154
3155 IEM_MC_BEGIN(0, 2);
3156 IEM_MC_LOCAL(uint64_t, uSrc);
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3158
3159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3162 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3163
3164 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3165 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3166
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 return VINF_SUCCESS;
3170 }
3171
3172 /**
3173 * @opdone
3174 * @opmnemonic ud660f17m3
3175 * @opcode 0x17
3176 * @opcodesub 11 mr/reg
3177 * @oppfx 0x66
3178 * @opunused immediate
3179 * @opcpuid sse
3180 * @optest ->
3181 */
3182 return IEMOP_RAISE_INVALID_OPCODE();
3183}
3184
3185
3186/**
3187 * @opdone
3188 * @opmnemonic udf30f17
3189 * @opcode 0x17
3190 * @oppfx 0xf3
3191 * @opunused intel-modrm
3192 * @opcpuid sse
3193 * @optest ->
3194 * @opdone
3195 */
3196
3197/**
3198 * @opmnemonic udf20f17
3199 * @opcode 0x17
3200 * @oppfx 0xf2
3201 * @opunused intel-modrm
3202 * @opcpuid sse
3203 * @optest ->
3204 * @opdone
3205 */
3206
3207
3208/** Opcode 0x0f 0x18. */
3209FNIEMOP_DEF(iemOp_prefetch_Grp16)
3210{
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if (IEM_IS_MODRM_MEM_MODE(bRm))
3213 {
3214 switch (IEM_GET_MODRM_REG_8(bRm))
3215 {
3216 case 4: /* Aliased to /0 for the time being according to AMD. */
3217 case 5: /* Aliased to /0 for the time being according to AMD. */
3218 case 6: /* Aliased to /0 for the time being according to AMD. */
3219 case 7: /* Aliased to /0 for the time being according to AMD. */
3220 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3221 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3222 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3223 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3225 }
3226
3227 IEM_MC_BEGIN(0, 1);
3228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3231 /* Currently a NOP. */
3232 NOREF(GCPtrEffSrc);
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 return VINF_SUCCESS;
3236 }
3237
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239}
3240
3241
3242/** Opcode 0x0f 0x19..0x1f. */
3243FNIEMOP_DEF(iemOp_nop_Ev)
3244{
3245 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3247 if (IEM_IS_MODRM_REG_MODE(bRm))
3248 {
3249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3250 IEM_MC_BEGIN(0, 0);
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 }
3254 else
3255 {
3256 IEM_MC_BEGIN(0, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3260 /* Currently a NOP. */
3261 NOREF(GCPtrEffSrc);
3262 IEM_MC_ADVANCE_RIP();
3263 IEM_MC_END();
3264 }
3265 return VINF_SUCCESS;
3266}
3267
3268
3269/** Opcode 0x0f 0x20. */
3270FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3271{
3272 /* mod is ignored, as are operand size overrides. */
3273 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3274 IEMOP_HLP_MIN_386();
3275 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3276 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3277 else
3278 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3279
3280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3281 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3282 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3283 {
3284 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3285 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3286 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3287 iCrReg |= 8;
3288 }
3289 switch (iCrReg)
3290 {
3291 case 0: case 2: case 3: case 4: case 8:
3292 break;
3293 default:
3294 return IEMOP_RAISE_INVALID_OPCODE();
3295 }
3296 IEMOP_HLP_DONE_DECODING();
3297
3298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3299}
3300
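/**
 * For reference, the LOCK prefix quirk above is how 32-bit guests reach CR8
 * on CPUs with the AMD alternate encoding (e.g. f0 0f 20 c0 reads CR8 rather
 * than faulting); a sketch of the register number computation (hypothetical
 * helper):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint8_t ModelCrRegFromModRm(uint8_t bRm, int fLockPrefix)
 *  {
 *      uint8_t iCrReg = (bRm >> 3) & 7; // ModRM.reg field
 *      if (fLockPrefix)
 *          iCrReg |= 8;                 // LOCK acts like REX.R here
 *      return iCrReg;
 *  }
 * @endcode
 */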
3301
3302/** Opcode 0x0f 0x21. */
3303FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3304{
3305 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3306 IEMOP_HLP_MIN_386();
3307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3310 return IEMOP_RAISE_INVALID_OPCODE();
3311 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3312 IEM_GET_MODRM_RM(pVCpu, bRm),
3313 IEM_GET_MODRM_REG_8(bRm));
3314}
3315
3316
3317/** Opcode 0x0f 0x22. */
3318FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3319{
3320 /* mod is ignored, as are operand size overrides. */
3321 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3322 IEMOP_HLP_MIN_386();
3323 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3324 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3325 else
3326 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3327
3328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3329 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3330 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3331 {
3332 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3333 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3334 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3335 iCrReg |= 8;
3336 }
3337 switch (iCrReg)
3338 {
3339 case 0: case 2: case 3: case 4: case 8:
3340 break;
3341 default:
3342 return IEMOP_RAISE_INVALID_OPCODE();
3343 }
3344 IEMOP_HLP_DONE_DECODING();
3345
3346 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3347}
3348
3349
3350/** Opcode 0x0f 0x23. */
3351FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3352{
3353 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3354 IEMOP_HLP_MIN_386();
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3358 return IEMOP_RAISE_INVALID_OPCODE();
3359 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3360 IEM_GET_MODRM_REG_8(bRm),
3361 IEM_GET_MODRM_RM(pVCpu, bRm));
3362}
3363
3364
3365/** Opcode 0x0f 0x24. */
3366FNIEMOP_DEF(iemOp_mov_Rd_Td)
3367{
3368 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3369 IEMOP_HLP_MIN_386();
3370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3372 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3373 return IEMOP_RAISE_INVALID_OPCODE();
3374 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3375 IEM_GET_MODRM_RM(pVCpu, bRm),
3376 IEM_GET_MODRM_REG_8(bRm));
3377}
3378
3379
3380/** Opcode 0x0f 0x26. */
3381FNIEMOP_DEF(iemOp_mov_Td_Rd)
3382{
3383 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3384 IEMOP_HLP_MIN_386();
3385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3388 return IEMOP_RAISE_INVALID_OPCODE();
3389 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3390 IEM_GET_MODRM_REG_8(bRm),
3391 IEM_GET_MODRM_RM(pVCpu, bRm));
3392}
3393
3394
3395/**
3396 * @opcode 0x28
3397 * @oppfx none
3398 * @opcpuid sse
3399 * @opgroup og_sse_simdfp_datamove
3400 * @opxcpttype 1
3401 * @optest op1=1 op2=2 -> op1=2
3402 * @optest op1=0 op2=-42 -> op1=-42
3403 */
3404FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3405{
3406 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 if (IEM_IS_MODRM_REG_MODE(bRm))
3409 {
3410 /*
3411 * Register, register.
3412 */
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 IEM_MC_BEGIN(0, 0);
3415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3417 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3418 IEM_GET_MODRM_RM(pVCpu, bRm));
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 }
3422 else
3423 {
3424 /*
3425 * Register, memory.
3426 */
3427 IEM_MC_BEGIN(0, 2);
3428 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3430
3431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435
3436 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3437 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443}
3444
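/**
 * Unlike MOVUPS, the MOVAPS memory form above goes through the aligned
 * fetch/store helpers; conceptually the extra requirement is just this check
 * (hypothetical model, the real code raises \#GP(0) through the IEM
 * exception machinery):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static int ModelMovapsAddrOk(uint64_t GCPtrEff)
 *  {
 *      return (GCPtrEff & 15) == 0; // misaligned 16-byte access faults
 *  }
 * @endcode
 */
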
3445/**
3446 * @opcode 0x28
3447 * @oppfx 66
3448 * @opcpuid sse2
3449 * @opgroup og_sse2_pcksclr_datamove
3450 * @opxcpttype 1
3451 * @optest op1=1 op2=2 -> op1=2
3452 * @optest op1=0 op2=-42 -> op1=-42
3453 */
3454FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3455{
3456 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3458 if (IEM_IS_MODRM_REG_MODE(bRm))
3459 {
3460 /*
3461 * Register, register.
3462 */
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_BEGIN(0, 0);
3465 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3468 IEM_GET_MODRM_RM(pVCpu, bRm));
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(0, 2);
3478 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480
3481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3485
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3488
3489 IEM_MC_ADVANCE_RIP();
3490 IEM_MC_END();
3491 }
3492 return VINF_SUCCESS;
3493}
3494
3495/* Opcode 0xf3 0x0f 0x28 - invalid */
3496/* Opcode 0xf2 0x0f 0x28 - invalid */
3497
3498/**
3499 * @opcode 0x29
3500 * @oppfx none
3501 * @opcpuid sse
3502 * @opgroup og_sse_simdfp_datamove
3503 * @opxcpttype 1
3504 * @optest op1=1 op2=2 -> op1=2
3505 * @optest op1=0 op2=-42 -> op1=-42
3506 */
3507FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3508{
3509 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3511 if (IEM_IS_MODRM_REG_MODE(bRm))
3512 {
3513 /*
3514 * Register, register.
3515 */
3516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3520 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3521 IEM_GET_MODRM_REG(pVCpu, bRm));
3522 IEM_MC_ADVANCE_RIP();
3523 IEM_MC_END();
3524 }
3525 else
3526 {
3527 /*
3528 * Memory, register.
3529 */
3530 IEM_MC_BEGIN(0, 2);
3531 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533
3534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3538
3539 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3540 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3541
3542 IEM_MC_ADVANCE_RIP();
3543 IEM_MC_END();
3544 }
3545 return VINF_SUCCESS;
3546}
3547
3548/**
3549 * @opcode 0x29
3550 * @oppfx 66
3551 * @opcpuid sse2
3552 * @opgroup og_sse2_pcksclr_datamove
3553 * @opxcpttype 1
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
3557FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3558{
3559 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3561 if (IEM_IS_MODRM_REG_MODE(bRm))
3562 {
3563 /*
3564 * Register, register.
3565 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 0);
3568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3570 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3571 IEM_GET_MODRM_REG(pVCpu, bRm));
3572 IEM_MC_ADVANCE_RIP();
3573 IEM_MC_END();
3574 }
3575 else
3576 {
3577 /*
3578 * Memory, register.
3579 */
3580 IEM_MC_BEGIN(0, 2);
3581 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3583
3584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3588
3589 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3590 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3591
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 }
3595 return VINF_SUCCESS;
3596}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3603FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3604{
3605 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3607 if (IEM_IS_MODRM_REG_MODE(bRm))
3608 {
3609 /*
3610 * Register, register.
3611 */
3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3613
3614 IEM_MC_BEGIN(3, 1);
3615 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3616 IEM_MC_LOCAL(X86XMMREG, Dst);
3617 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3618 IEM_MC_ARG(uint64_t, u64Src, 2);
3619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3620 IEM_MC_PREPARE_FPU_USAGE();
3621 IEM_MC_FPU_TO_MMX_MODE();
3622
3623 IEM_MC_REF_MXCSR(pfMxcsr);
3624 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3625 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3626
3627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3628 IEM_MC_IF_MXCSR_XCPT_PENDING()
3629 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3630 IEM_MC_ELSE()
3631 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3632 IEM_MC_ENDIF();
3633
3634 IEM_MC_ADVANCE_RIP();
3635 IEM_MC_END();
3636 }
3637 else
3638 {
3639 /*
3640 * Register, memory.
3641 */
3642 IEM_MC_BEGIN(3, 3);
3643 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3644 IEM_MC_LOCAL(X86XMMREG, Dst);
3645 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3646 IEM_MC_ARG(uint64_t, u64Src, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648
3649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3652 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3653
3654 IEM_MC_PREPARE_FPU_USAGE();
3655 IEM_MC_FPU_TO_MMX_MODE();
3656 IEM_MC_REF_MXCSR(pfMxcsr);
 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3657
3658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3659 IEM_MC_IF_MXCSR_XCPT_PENDING()
3660 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3661 IEM_MC_ELSE()
3662 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3663 IEM_MC_ENDIF();
3664
3665 IEM_MC_ADVANCE_RIP();
3666 IEM_MC_END();
3667 }
3668 return VINF_SUCCESS;
3669}
3670
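/**
 * CVTPI2PS converts two packed int32s from the MMX source into the low two
 * singles of the destination, leaving the high quadword untouched (hence the
 * destination fetch above).  Rough model ignoring MXCSR rounding and
 * exceptions (hypothetical types/helper):
 *
 * @code
 *  #include <stdint.h>
 *  #include <string.h>
 *
 *  typedef struct { float ar32[4]; } MODELXMMREG;
 *
 *  static void ModelCvtpi2ps(MODELXMMREG *pDst, uint64_t u64Src)
 *  {
 *      int32_t ai32[2];
 *      memcpy(ai32, &u64Src, sizeof(ai32));
 *      pDst->ar32[0] = (float)ai32[0]; // ar32[2] and ar32[3] keep their values
 *      pDst->ar32[1] = (float)ai32[1];
 *  }
 * @endcode
 */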
3671
3672/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3673FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3674{
3675 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
3676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3677 if (IEM_IS_MODRM_REG_MODE(bRm))
3678 {
3679 /*
3680 * Register, register.
3681 */
3682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3683
3684 IEM_MC_BEGIN(3, 1);
3685 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3686 IEM_MC_LOCAL(X86XMMREG, Dst);
3687 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3688 IEM_MC_ARG(uint64_t, u64Src, 2);
3689 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3690 IEM_MC_PREPARE_FPU_USAGE();
3691 IEM_MC_FPU_TO_MMX_MODE();
3692
3693 IEM_MC_REF_MXCSR(pfMxcsr);
3694 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3695
3696 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3697 IEM_MC_IF_MXCSR_XCPT_PENDING()
3698 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3699 IEM_MC_ELSE()
3700 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3701 IEM_MC_ENDIF();
3702
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(3, 3);
3712 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3713 IEM_MC_LOCAL(X86XMMREG, Dst);
3714 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3715 IEM_MC_ARG(uint64_t, u64Src, 2);
3716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3717
3718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722
3723 /* Doesn't cause a transition to MMX mode. */
3724 IEM_MC_PREPARE_SSE_USAGE();
3725 IEM_MC_REF_MXCSR(pfMxcsr);
3726
3727 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3728 IEM_MC_IF_MXCSR_XCPT_PENDING()
3729 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3730 IEM_MC_ELSE()
3731 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3732 IEM_MC_ENDIF();
3733
3734 IEM_MC_ADVANCE_RIP();
3735 IEM_MC_END();
3736 }
3737 return VINF_SUCCESS;
3738}
3739
3740
3741/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3742FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3743{
3744 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3745
3746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3748 {
3749 if (IEM_IS_MODRM_REG_MODE(bRm))
3750 {
3751 /* XMM, greg64 */
3752 IEM_MC_BEGIN(3, 4);
3753 IEM_MC_LOCAL(uint32_t, fMxcsr);
3754 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3755 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3756 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3757 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3758
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3761 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3762
3763 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3764 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3765 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3766 IEM_MC_IF_MXCSR_XCPT_PENDING()
3767 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3768 IEM_MC_ELSE()
3769 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3770 IEM_MC_ENDIF();
3771
3772 IEM_MC_ADVANCE_RIP();
3773 IEM_MC_END();
3774 }
3775 else
3776 {
3777 /* XMM, [mem64] */
3778 IEM_MC_BEGIN(3, 4);
3779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3780 IEM_MC_LOCAL(uint32_t, fMxcsr);
3781 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3782 IEM_MC_LOCAL(int64_t, i64Src);
3783 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3784 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3785 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3786
3787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3791
3792 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3795 IEM_MC_IF_MXCSR_XCPT_PENDING()
3796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3797 IEM_MC_ELSE()
3798 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3799 IEM_MC_ENDIF();
3800
3801 IEM_MC_ADVANCE_RIP();
3802 IEM_MC_END();
3803 }
3804 }
3805 else
3806 {
3807 if (IEM_IS_MODRM_REG_MODE(bRm))
3808 {
3809 /* XMM, greg32 */
3810 IEM_MC_BEGIN(3, 4);
3811 IEM_MC_LOCAL(uint32_t, fMxcsr);
3812 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3813 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3814 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3815 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3816
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3820
3821 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3824 IEM_MC_IF_MXCSR_XCPT_PENDING()
3825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3826 IEM_MC_ELSE()
3827 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3828 IEM_MC_ENDIF();
3829
3830 IEM_MC_ADVANCE_RIP();
3831 IEM_MC_END();
3832 }
3833 else
3834 {
3835 /* XMM, [mem32] */
3836 IEM_MC_BEGIN(3, 4);
3837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3838 IEM_MC_LOCAL(uint32_t, fMxcsr);
3839 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3840 IEM_MC_LOCAL(int32_t, i32Src);
3841 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3842 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3843 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3844
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3849
3850 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3853 IEM_MC_IF_MXCSR_XCPT_PENDING()
3854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3855 IEM_MC_ELSE()
3856 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_RM(pVCpu, bRm), r32Dst);
3857 IEM_MC_ENDIF();
3858
3859 IEM_MC_ADVANCE_RIP();
3860 IEM_MC_END();
3861 }
3862 }
3863 return VINF_SUCCESS;
3864}
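
/*
 * Rough reference model of CVTSI2SS: the integer source (32- or 64-bit as
 * selected by REX.W) is converted to float32 in the low dword of the XMM
 * destination, rounding per MXCSR.RC; the three high dwords are preserved.
 * An inexact result sets MXCSR.PE, which is what the exception check above
 * may act upon.
 *
 *      void RefCvtSi2Ss(float aDst[4], int64_t iSrc)
 *      {
 *          aDst[0] = (float)iSrc;      // rounded per MXCSR.RC
 *          // aDst[1..3] are left untouched.
 *      }
 */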
3865
3866
3867/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3868FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3869{
3870 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
3871
3872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3873 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3874 {
3875 if (IEM_IS_MODRM_REG_MODE(bRm))
3876 {
3877 /* XMM, greg64 */
3878 IEM_MC_BEGIN(3, 4);
3879 IEM_MC_LOCAL(uint32_t, fMxcsr);
3880 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3881 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3882 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3883 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3884
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3887 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3888
3889 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3890 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3891 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3892 IEM_MC_IF_MXCSR_XCPT_PENDING()
3893 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3894 IEM_MC_ELSE()
3895 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3896 IEM_MC_ENDIF();
3897
3898 IEM_MC_ADVANCE_RIP();
3899 IEM_MC_END();
3900 }
3901 else
3902 {
3903 /* XMM, [mem64] */
3904 IEM_MC_BEGIN(3, 4);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906 IEM_MC_LOCAL(uint32_t, fMxcsr);
3907 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3908 IEM_MC_LOCAL(int64_t, i64Src);
3909 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3910 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3911 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3912
3913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3916 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3917
3918 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3919 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3920 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3921 IEM_MC_IF_MXCSR_XCPT_PENDING()
3922 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3923 IEM_MC_ELSE()
3924 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3925 IEM_MC_ENDIF();
3926
3927 IEM_MC_ADVANCE_RIP();
3928 IEM_MC_END();
3929 }
3930 }
3931 else
3932 {
3933 if (IEM_IS_MODRM_REG_MODE(bRm))
3934 {
3935 /* XMM, greg32 */
3936 IEM_MC_BEGIN(3, 4);
3937 IEM_MC_LOCAL(uint32_t, fMxcsr);
3938 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3939 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3940 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3941 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3942
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3945 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3946
3947 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3949 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3950 IEM_MC_IF_MXCSR_XCPT_PENDING()
3951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3952 IEM_MC_ELSE()
3953 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3954 IEM_MC_ENDIF();
3955
3956 IEM_MC_ADVANCE_RIP();
3957 IEM_MC_END();
3958 }
3959 else
3960 {
3961 /* XMM, [mem32] */
3962 IEM_MC_BEGIN(3, 4);
3963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3964 IEM_MC_LOCAL(uint32_t, fMxcsr);
3965 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3966 IEM_MC_LOCAL(int32_t, i32Src);
3967 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3968 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3969 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3970
3971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3974 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3975
3976 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3977 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3978 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3979 IEM_MC_IF_MXCSR_XCPT_PENDING()
3980 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3981 IEM_MC_ELSE()
3982 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_RM(pVCpu, bRm), r64Dst);
3983 IEM_MC_ENDIF();
3984
3985 IEM_MC_ADVANCE_RIP();
3986 IEM_MC_END();
3987 }
3988 }
3989 return VINF_SUCCESS;
3990}
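
/*
 * Reference note for CVTSI2SD: the int32 form is always exact, while the
 * REX.W int64 form can be inexact (float64 has only 53 mantissa bits) and
 * then rounds per MXCSR.RC and sets MXCSR.PE.
 *
 *      void RefCvtSi2Sd(double aDst[2], int64_t iSrc)
 *      {
 *          aDst[0] = (double)iSrc;     // exact for int32 inputs, may round for int64
 *          // aDst[1] is left untouched.
 *      }
 */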
3991
3992
3993/**
3994 * @opcode 0x2b
3995 * @opcodesub !11 mr/reg
3996 * @oppfx none
3997 * @opcpuid sse
3998 * @opgroup og_sse1_cachect
3999 * @opxcpttype 1
4000 * @optest op1=1 op2=2 -> op1=2
4001 * @optest op1=0 op2=-42 -> op1=-42
4002 */
4003FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4004{
4005 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4007 if (IEM_IS_MODRM_MEM_MODE(bRm))
4008 {
4009 /*
4010 * memory, register.
4011 */
4012 IEM_MC_BEGIN(0, 2);
4013 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4015
4016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4020
4021 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4022 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4023
4024 IEM_MC_ADVANCE_RIP();
4025 IEM_MC_END();
4026 }
4027 /* The register, register encoding is invalid. */
4028 else
4029 return IEMOP_RAISE_INVALID_OPCODE();
4030 return VINF_SUCCESS;
4031}
4032
4033/**
4034 * @opcode 0x2b
4035 * @opcodesub !11 mr/reg
4036 * @oppfx 0x66
4037 * @opcpuid sse2
4038 * @opgroup og_sse2_cachect
4039 * @opxcpttype 1
4040 * @optest op1=1 op2=2 -> op1=2
4041 * @optest op1=0 op2=-42 -> op1=-42
4042 */
4043FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4044{
4045 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4047 if (IEM_IS_MODRM_MEM_MODE(bRm))
4048 {
4049 /*
4050 * memory, register.
4051 */
4052 IEM_MC_BEGIN(0, 2);
4053 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4055
4056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4059 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4060
4061 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4062 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4063
4064 IEM_MC_ADVANCE_RIP();
4065 IEM_MC_END();
4066 }
4067 /* The register, register encoding is invalid. */
4068 else
4069 return IEMOP_RAISE_INVALID_OPCODE();
4070 return VINF_SUCCESS;
4071}
4072/* Opcode 0xf3 0x0f 0x2b - invalid */
4073/* Opcode 0xf2 0x0f 0x2b - invalid */
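
/*
 * What the MOVNTPS/MOVNTPD emulations above boil down to: an ordinary 16 byte
 * store where #GP(0) is raised for unaligned addresses (that is what
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces); the non-temporal cache hint is
 * necessarily lost in emulation. A rough native equivalent using compiler
 * intrinsics:
 *
 *      #include <emmintrin.h>
 *      void RefMovntpd(double *pdDst16Aligned, __m128d uSrc)
 *      {
 *          _mm_stream_pd(pdDst16Aligned, uSrc); // NT hint; requires 16 byte alignment
 *      }
 */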
4074
4075
4076/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4077FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4078{
4079 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4081 if (IEM_IS_MODRM_REG_MODE(bRm))
4082 {
4083 /*
4084 * Register, register.
4085 */
4086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4087
4088 IEM_MC_BEGIN(3, 1);
4089 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4090 IEM_MC_LOCAL(uint64_t, u64Dst);
4091 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4092 IEM_MC_ARG(uint64_t, u64Src, 2);
4093 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4094 IEM_MC_PREPARE_FPU_USAGE();
4095 IEM_MC_FPU_TO_MMX_MODE();
4096
4097 IEM_MC_REF_MXCSR(pfMxcsr);
4098 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4099
4100 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4101 IEM_MC_IF_MXCSR_XCPT_PENDING()
4102 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4103 IEM_MC_ELSE()
4104 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4105 IEM_MC_ENDIF();
4106
4107 IEM_MC_ADVANCE_RIP();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112 /*
4113 * Register, memory.
4114 */
4115 IEM_MC_BEGIN(3, 2);
4116 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4117 IEM_MC_LOCAL(uint64_t, u64Dst);
4118 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4119 IEM_MC_ARG(uint64_t, u64Src, 2);
4120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4121
4122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4125 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4126
4127 IEM_MC_PREPARE_FPU_USAGE();
4128 IEM_MC_FPU_TO_MMX_MODE();
4129 IEM_MC_REF_MXCSR(pfMxcsr);
4130
4131 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4132 IEM_MC_IF_MXCSR_XCPT_PENDING()
4133 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4134 IEM_MC_ELSE()
4135 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4136 IEM_MC_ENDIF();
4137
4138 IEM_MC_ADVANCE_RIP();
4139 IEM_MC_END();
4140 }
4141 return VINF_SUCCESS;
4142}
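
/*
 * Rough reference model for the truncating converts (CVTTPS2PI here, and
 * CVTTPD2PI/CVTTSS2SI/CVTTSD2SI below): rounding is always toward zero
 * regardless of MXCSR.RC, and a NaN or out-of-range input produces the
 * integer indefinite value (INT32_MIN) and sets MXCSR.IE.
 *
 *      int32_t RefCvttF32ToI32(float r32)
 *      {
 *          if (!(r32 >= -2147483648.0f && r32 < 2147483648.0f)) // NaN fails too
 *              return INT32_MIN;                                // + MXCSR.IE
 *          return (int32_t)r32;                                 // truncates toward zero
 *      }
 */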
4143
4144
4145/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4146FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4147{
4148 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4150 if (IEM_IS_MODRM_REG_MODE(bRm))
4151 {
4152 /*
4153 * Register, register.
4154 */
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156
4157 IEM_MC_BEGIN(3, 1);
4158 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4159 IEM_MC_LOCAL(uint64_t, u64Dst);
4160 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4161 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4162 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4163 IEM_MC_PREPARE_FPU_USAGE();
4164 IEM_MC_FPU_TO_MMX_MODE();
4165
4166 IEM_MC_REF_MXCSR(pfMxcsr);
4167 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4168
4169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4170 IEM_MC_IF_MXCSR_XCPT_PENDING()
4171 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4172 IEM_MC_ELSE()
4173 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4174 IEM_MC_ENDIF();
4175
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /*
4182 * Register, memory.
4183 */
4184 IEM_MC_BEGIN(3, 3);
4185 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4186 IEM_MC_LOCAL(uint64_t, u64Dst);
4187 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4188 IEM_MC_LOCAL(X86XMMREG, uSrc);
4189 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4191
4192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4194 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4195 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4196
4197 IEM_MC_PREPARE_FPU_USAGE();
4198 IEM_MC_FPU_TO_MMX_MODE();
4199
4200 IEM_MC_REF_MXCSR(pfMxcsr);
4201
4202 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4203 IEM_MC_IF_MXCSR_XCPT_PENDING()
4204 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4205 IEM_MC_ELSE()
4206 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4207 IEM_MC_ENDIF();
4208
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4217FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4218{
4219 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4220
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4223 {
4224 if (IEM_IS_MODRM_REG_MODE(bRm))
4225 {
4226 /* greg64, XMM */
4227 IEM_MC_BEGIN(3, 4);
4228 IEM_MC_LOCAL(uint32_t, fMxcsr);
4229 IEM_MC_LOCAL(int64_t, i64Dst);
4230 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4231 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4232 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4233
4234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4235 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4236 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4237
4238 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4239 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4240 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4241 IEM_MC_IF_MXCSR_XCPT_PENDING()
4242 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4243 IEM_MC_ELSE()
4244 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4245 IEM_MC_ENDIF();
4246
4247 IEM_MC_ADVANCE_RIP();
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252 /* greg64, [mem64] */
4253 IEM_MC_BEGIN(3, 4);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4255 IEM_MC_LOCAL(uint32_t, fMxcsr);
4256 IEM_MC_LOCAL(int64_t, i64Dst);
4257 IEM_MC_LOCAL(uint32_t, u32Src);
4258 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4259 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4260 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4261
4262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4265 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4266
4267 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4268 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4269 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4270 IEM_MC_IF_MXCSR_XCPT_PENDING()
4271 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4272 IEM_MC_ELSE()
4273 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4274 IEM_MC_ENDIF();
4275
4276 IEM_MC_ADVANCE_RIP();
4277 IEM_MC_END();
4278 }
4279 }
4280 else
4281 {
4282 if (IEM_IS_MODRM_REG_MODE(bRm))
4283 {
4284 /* greg, XMM */
4285 IEM_MC_BEGIN(3, 4);
4286 IEM_MC_LOCAL(uint32_t, fMxcsr);
4287 IEM_MC_LOCAL(int32_t, i32Dst);
4288 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4289 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4290 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4291
4292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4293 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4294 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4295
4296 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4297 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4298 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4299 IEM_MC_IF_MXCSR_XCPT_PENDING()
4300 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4301 IEM_MC_ELSE()
4302 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4303 IEM_MC_ENDIF();
4304
4305 IEM_MC_ADVANCE_RIP();
4306 IEM_MC_END();
4307 }
4308 else
4309 {
4310 /* greg, [mem32] */
4311 IEM_MC_BEGIN(3, 4);
4312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4313 IEM_MC_LOCAL(uint32_t, fMxcsr);
4314 IEM_MC_LOCAL(int32_t, i32Dst);
4315 IEM_MC_LOCAL(uint32_t, u32Src);
4316 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4317 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4318 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4319
4320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4323 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4324
4325 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4326 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4327 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4328 IEM_MC_IF_MXCSR_XCPT_PENDING()
4329 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4330 IEM_MC_ELSE()
4331 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4332 IEM_MC_ENDIF();
4333
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4343FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4344{
4345 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4346
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4349 {
4350 if (IEM_IS_MODRM_REG_MODE(bRm))
4351 {
4352 /* greg64, XMM */
4353 IEM_MC_BEGIN(3, 4);
4354 IEM_MC_LOCAL(uint32_t, fMxcsr);
4355 IEM_MC_LOCAL(int64_t, i64Dst);
4356 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4357 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4358 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4359
4360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4361 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4362 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4363
4364 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4365 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4366 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4367 IEM_MC_IF_MXCSR_XCPT_PENDING()
4368 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4369 IEM_MC_ELSE()
4370 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4371 IEM_MC_ENDIF();
4372
4373 IEM_MC_ADVANCE_RIP();
4374 IEM_MC_END();
4375 }
4376 else
4377 {
4378 /* greg64, [mem64] */
4379 IEM_MC_BEGIN(3, 4);
4380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4381 IEM_MC_LOCAL(uint32_t, fMxcsr);
4382 IEM_MC_LOCAL(int64_t, i64Dst);
4383 IEM_MC_LOCAL(uint64_t, u64Src);
4384 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4385 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4386 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4387
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4390 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4391 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4392
4393 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4394 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4395 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4396 IEM_MC_IF_MXCSR_XCPT_PENDING()
4397 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4398 IEM_MC_ELSE()
4399 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4400 IEM_MC_ENDIF();
4401
4402 IEM_MC_ADVANCE_RIP();
4403 IEM_MC_END();
4404 }
4405 }
4406 else
4407 {
4408 if (IEM_IS_MODRM_REG_MODE(bRm))
4409 {
4410 /* greg, XMM */
4411 IEM_MC_BEGIN(3, 4);
4412 IEM_MC_LOCAL(uint32_t, fMxcsr);
4413 IEM_MC_LOCAL(int32_t, i32Dst);
4414 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4415 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4416 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4417
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4420 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4421
4422 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4423 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4424 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4425 IEM_MC_IF_MXCSR_XCPT_PENDING()
4426 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4427 IEM_MC_ELSE()
4428 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4429 IEM_MC_ENDIF();
4430
4431 IEM_MC_ADVANCE_RIP();
4432 IEM_MC_END();
4433 }
4434 else
4435 {
4436 /* greg, [mem64] */
4437 IEM_MC_BEGIN(3, 4);
4438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4439 IEM_MC_LOCAL(uint32_t, fMxcsr);
4440 IEM_MC_LOCAL(int32_t, i32Dst);
4441 IEM_MC_LOCAL(uint64_t, u64Src);
4442 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4443 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4444 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4445
4446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4448 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4449 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4450
4451 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4452 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4453 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4454 IEM_MC_IF_MXCSR_XCPT_PENDING()
4455 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4456 IEM_MC_ELSE()
4457 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4458 IEM_MC_ENDIF();
4459
4460 IEM_MC_ADVANCE_RIP();
4461 IEM_MC_END();
4462 }
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4469FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4470{
4471 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4473 if (IEM_IS_MODRM_REG_MODE(bRm))
4474 {
4475 /*
4476 * Register, register.
4477 */
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479
4480 IEM_MC_BEGIN(3, 1);
4481 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4482 IEM_MC_LOCAL(uint64_t, u64Dst);
4483 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4484 IEM_MC_ARG(uint64_t, u64Src, 2);
4485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4486 IEM_MC_PREPARE_FPU_USAGE();
4487 IEM_MC_FPU_TO_MMX_MODE();
4488
4489 IEM_MC_REF_MXCSR(pfMxcsr);
4490 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4491
4492 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4493 IEM_MC_IF_MXCSR_XCPT_PENDING()
4494 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4495 IEM_MC_ELSE()
4496 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4497 IEM_MC_ENDIF();
4498
4499 IEM_MC_ADVANCE_RIP();
4500 IEM_MC_END();
4501 }
4502 else
4503 {
4504 /*
4505 * Register, memory.
4506 */
4507 IEM_MC_BEGIN(3, 2);
4508 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4509 IEM_MC_LOCAL(uint64_t, u64Dst);
4510 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4511 IEM_MC_ARG(uint64_t, u64Src, 2);
4512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4513
4514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4517 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4518
4519 IEM_MC_PREPARE_FPU_USAGE();
4520 IEM_MC_FPU_TO_MMX_MODE();
4521 IEM_MC_REF_MXCSR(pfMxcsr);
4522
4523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4524 IEM_MC_IF_MXCSR_XCPT_PENDING()
4525 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4526 IEM_MC_ELSE()
4527 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4528 IEM_MC_ENDIF();
4529
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 return VINF_SUCCESS;
4534}
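
/*
 * The non-truncating converts (CVTPS2PI here, CVTSS2SI/CVTSD2SI below) differ
 * from their 0x2c counterparts only in honouring MXCSR.RC instead of forcing
 * round-toward-zero. A rough model, assuming the host rounding mode has been
 * set to mirror MXCSR.RC:
 *
 *      #include <math.h>
 *      int32_t RefCvtF32ToI32(float r32)
 *      {
 *          float const rRounded = rintf(r32);  // rounds per current mode
 *          if (!(rRounded >= -2147483648.0f && rRounded < 2147483648.0f))
 *              return INT32_MIN;               // integer indefinite + MXCSR.IE
 *          return (int32_t)rRounded;
 *      }
 */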
4535
4536
4537/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4538FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4539{
4540 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /// @todo
4541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4542 if (IEM_IS_MODRM_REG_MODE(bRm))
4543 {
4544 /*
4545 * Register, register.
4546 */
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4548
4549 IEM_MC_BEGIN(3, 1);
4550 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4551 IEM_MC_LOCAL(uint64_t, u64Dst);
4552 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4553 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4554 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4555 IEM_MC_PREPARE_FPU_USAGE();
4556 IEM_MC_FPU_TO_MMX_MODE();
4557
4558 IEM_MC_REF_MXCSR(pfMxcsr);
4559 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4560
4561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4562 IEM_MC_IF_MXCSR_XCPT_PENDING()
4563 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4564 IEM_MC_ELSE()
4565 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4566 IEM_MC_ENDIF();
4567
4568 IEM_MC_ADVANCE_RIP();
4569 IEM_MC_END();
4570 }
4571 else
4572 {
4573 /*
4574 * Register, memory.
4575 */
4576 IEM_MC_BEGIN(3, 3);
4577 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4578 IEM_MC_LOCAL(uint64_t, u64Dst);
4579 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4580 IEM_MC_LOCAL(X86XMMREG, uSrc);
4581 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4583
4584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4587 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4588
4589 IEM_MC_PREPARE_FPU_USAGE();
4590 IEM_MC_FPU_TO_MMX_MODE();
4591
4592 IEM_MC_REF_MXCSR(pfMxcsr);
4593
4594 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4595 IEM_MC_IF_MXCSR_XCPT_PENDING()
4596 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4597 IEM_MC_ELSE()
4598 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Dst);
4599 IEM_MC_ENDIF();
4600
4601 IEM_MC_ADVANCE_RIP();
4602 IEM_MC_END();
4603 }
4604 return VINF_SUCCESS;
4605}
4606
4607
4608/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4609FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4610{
4611 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4612
4613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4615 {
4616 if (IEM_IS_MODRM_REG_MODE(bRm))
4617 {
4618 /* greg64, XMM */
4619 IEM_MC_BEGIN(3, 4);
4620 IEM_MC_LOCAL(uint32_t, fMxcsr);
4621 IEM_MC_LOCAL(int64_t, i64Dst);
4622 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4623 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4624 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4625
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4628 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4629
4630 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4631 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4632 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4633 IEM_MC_IF_MXCSR_XCPT_PENDING()
4634 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4635 IEM_MC_ELSE()
4636 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4637 IEM_MC_ENDIF();
4638
4639 IEM_MC_ADVANCE_RIP();
4640 IEM_MC_END();
4641 }
4642 else
4643 {
4644 /* greg64, [mem64] */
4645 IEM_MC_BEGIN(3, 4);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647 IEM_MC_LOCAL(uint32_t, fMxcsr);
4648 IEM_MC_LOCAL(int64_t, i64Dst);
4649 IEM_MC_LOCAL(uint32_t, u32Src);
4650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4651 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4652 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4653
4654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4657 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4658
4659 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4660 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4661 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4662 IEM_MC_IF_MXCSR_XCPT_PENDING()
4663 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4664 IEM_MC_ELSE()
4665 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4666 IEM_MC_ENDIF();
4667
4668 IEM_MC_ADVANCE_RIP();
4669 IEM_MC_END();
4670 }
4671 }
4672 else
4673 {
4674 if (IEM_IS_MODRM_REG_MODE(bRm))
4675 {
4676 /* greg, XMM */
4677 IEM_MC_BEGIN(3, 4);
4678 IEM_MC_LOCAL(uint32_t, fMxcsr);
4679 IEM_MC_LOCAL(int32_t, i32Dst);
4680 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4681 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4682 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4683
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4686 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4687
4688 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4689 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4690 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4691 IEM_MC_IF_MXCSR_XCPT_PENDING()
4692 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4693 IEM_MC_ELSE()
4694 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4695 IEM_MC_ENDIF();
4696
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 else
4701 {
4702 /* greg, [mem32] */
4703 IEM_MC_BEGIN(3, 4);
4704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4705 IEM_MC_LOCAL(uint32_t, fMxcsr);
4706 IEM_MC_LOCAL(int32_t, i32Dst);
4707 IEM_MC_LOCAL(uint32_t, u32Src);
4708 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4709 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4710 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4711
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4715 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4716
4717 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4718 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4719 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4720 IEM_MC_IF_MXCSR_XCPT_PENDING()
4721 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4722 IEM_MC_ELSE()
4723 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4724 IEM_MC_ENDIF();
4725
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 }
4730 return VINF_SUCCESS;
4731}
4732
4733
4734/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4735FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4736{
4737 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4738
4739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4741 {
4742 if (IEM_IS_MODRM_REG_MODE(bRm))
4743 {
4744 /* greg64, XMM */
4745 IEM_MC_BEGIN(3, 4);
4746 IEM_MC_LOCAL(uint32_t, fMxcsr);
4747 IEM_MC_LOCAL(int64_t, i64Dst);
4748 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4749 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4750 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4751
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4754 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4755
4756 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4757 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4758 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4759 IEM_MC_IF_MXCSR_XCPT_PENDING()
4760 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4761 IEM_MC_ELSE()
4762 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4763 IEM_MC_ENDIF();
4764
4765 IEM_MC_ADVANCE_RIP();
4766 IEM_MC_END();
4767 }
4768 else
4769 {
4770 /* greg64, [mem64] */
4771 IEM_MC_BEGIN(3, 4);
4772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4773 IEM_MC_LOCAL(uint32_t, fMxcsr);
4774 IEM_MC_LOCAL(int64_t, i64Dst);
4775 IEM_MC_LOCAL(uint64_t, u64Src);
4776 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4777 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4778 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4779
4780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4783 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4784
4785 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4786 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4787 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4788 IEM_MC_IF_MXCSR_XCPT_PENDING()
4789 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4790 IEM_MC_ELSE()
4791 IEM_MC_STORE_GREG_I64(i64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4792 IEM_MC_ENDIF();
4793
4794 IEM_MC_ADVANCE_RIP();
4795 IEM_MC_END();
4796 }
4797 }
4798 else
4799 {
4800 if (IEM_IS_MODRM_REG_MODE(bRm))
4801 {
4802 /* greg, XMM */
4803 IEM_MC_BEGIN(3, 4);
4804 IEM_MC_LOCAL(uint32_t, fMxcsr);
4805 IEM_MC_LOCAL(int32_t, i32Dst);
4806 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4807 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4808 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4809
4810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4811 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4813
4814 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4815 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4816 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4817 IEM_MC_IF_MXCSR_XCPT_PENDING()
4818 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4819 IEM_MC_ELSE()
4820 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4821 IEM_MC_ENDIF();
4822
4823 IEM_MC_ADVANCE_RIP();
4824 IEM_MC_END();
4825 }
4826 else
4827 {
4828 /* greg, [mem64] */
4829 IEM_MC_BEGIN(3, 4);
4830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4831 IEM_MC_LOCAL(uint32_t, fMxcsr);
4832 IEM_MC_LOCAL(int32_t, i32Dst);
4833 IEM_MC_LOCAL(uint64_t, u64Src);
4834 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4835 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4836 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4837
4838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4841 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4842
4843 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4844 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4845 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4846 IEM_MC_IF_MXCSR_XCPT_PENDING()
4847 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4848 IEM_MC_ELSE()
4849 IEM_MC_STORE_GREG_I32(i32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
4850 IEM_MC_ENDIF();
4851
4852 IEM_MC_ADVANCE_RIP();
4853 IEM_MC_END();
4854 }
4855 }
4856 return VINF_SUCCESS;
4857}
4858
4859
4860/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4861FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4862{
4863 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4865 if (IEM_IS_MODRM_REG_MODE(bRm))
4866 {
4867 /*
4868 * Register, register.
4869 */
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_BEGIN(4, 1);
4872 IEM_MC_LOCAL(uint32_t, fEFlags);
4873 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4874 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4875 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4876 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4877 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4878 IEM_MC_PREPARE_SSE_USAGE();
4879 IEM_MC_FETCH_EFLAGS(fEFlags);
4880 IEM_MC_REF_MXCSR(pfMxcsr);
4881 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4882 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4883 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4884 IEM_MC_IF_MXCSR_XCPT_PENDING()
4885 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4886 IEM_MC_ELSE()
4887 IEM_MC_COMMIT_EFLAGS(fEFlags);
4888 IEM_MC_ENDIF();
4889
4890 IEM_MC_ADVANCE_RIP();
4891 IEM_MC_END();
4892 }
4893 else
4894 {
4895 /*
4896 * Register, memory.
4897 */
4898 IEM_MC_BEGIN(4, 3);
4899 IEM_MC_LOCAL(uint32_t, fEFlags);
4900 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4901 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4902 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4903 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4904 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4906
4907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4910 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4911
4912 IEM_MC_PREPARE_SSE_USAGE();
4913 IEM_MC_FETCH_EFLAGS(fEFlags);
4914 IEM_MC_REF_MXCSR(pfMxcsr);
4915 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4917 IEM_MC_IF_MXCSR_XCPT_PENDING()
4918 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4919 IEM_MC_ELSE()
4920 IEM_MC_COMMIT_EFLAGS(fEFlags);
4921 IEM_MC_ENDIF();
4922
4923 IEM_MC_ADVANCE_RIP();
4924 IEM_MC_END();
4925 }
4926 return VINF_SUCCESS;
4927}
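
/*
 * The EFLAGS result of the ucomiss/ucomisd workers is architecturally fixed:
 * ZF/PF/CF encode the compare outcome and OF/SF/AF are cleared. A sketch:
 *
 *      uint32_t RefUComiss(float r32Src1, float r32Src2)
 *      {
 *          if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   // unordered (NaN)
 *              return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
 *          if (r32Src1 > r32Src2)
 *              return 0;                                   // greater
 *          if (r32Src1 < r32Src2)
 *              return X86_EFL_CF;                          // less
 *          return X86_EFL_ZF;                              // equal
 *      }
 *
 * COMISS/COMISD (0x2f below) produce the same flags but also signal #I on
 * quiet NaN operands, whereas the unordered variants only do so for
 * signalling NaNs.
 */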
4928
4929
4930/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4931FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4932{
4933 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4935 if (IEM_IS_MODRM_REG_MODE(bRm))
4936 {
4937 /*
4938 * Register, register.
4939 */
4940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4941 IEM_MC_BEGIN(4, 1);
4942 IEM_MC_LOCAL(uint32_t, fEFlags);
4943 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4944 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4945 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4946 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4947 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4948 IEM_MC_PREPARE_SSE_USAGE();
4949 IEM_MC_FETCH_EFLAGS(fEFlags);
4950 IEM_MC_REF_MXCSR(pfMxcsr);
4951 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4952 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4953 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4954 IEM_MC_IF_MXCSR_XCPT_PENDING()
4955 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4956 IEM_MC_ELSE()
4957 IEM_MC_COMMIT_EFLAGS(fEFlags);
4958 IEM_MC_ENDIF();
4959
4960 IEM_MC_ADVANCE_RIP();
4961 IEM_MC_END();
4962 }
4963 else
4964 {
4965 /*
4966 * Register, memory.
4967 */
4968 IEM_MC_BEGIN(4, 3);
4969 IEM_MC_LOCAL(uint32_t, fEFlags);
4970 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4971 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4972 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4973 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4974 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4976
4977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4979 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4980 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4981
4982 IEM_MC_PREPARE_SSE_USAGE();
4983 IEM_MC_FETCH_EFLAGS(fEFlags);
4984 IEM_MC_REF_MXCSR(pfMxcsr);
4985 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4986 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4987 IEM_MC_IF_MXCSR_XCPT_PENDING()
4988 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4989 IEM_MC_ELSE()
4990 IEM_MC_COMMIT_EFLAGS(fEFlags);
4991 IEM_MC_ENDIF();
4992
4993 IEM_MC_ADVANCE_RIP();
4994 IEM_MC_END();
4995 }
4996 return VINF_SUCCESS;
4997}
4998
4999
5000/* Opcode 0xf3 0x0f 0x2e - invalid */
5001/* Opcode 0xf2 0x0f 0x2e - invalid */
5002
5003
5004/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5005FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5006{
5007 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5009 if (IEM_IS_MODRM_REG_MODE(bRm))
5010 {
5011 /*
5012 * Register, register.
5013 */
5014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5015 IEM_MC_BEGIN(4, 1);
5016 IEM_MC_LOCAL(uint32_t, fEFlags);
5017 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5018 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5019 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5020 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5021 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5022 IEM_MC_PREPARE_SSE_USAGE();
5023 IEM_MC_FETCH_EFLAGS(fEFlags);
5024 IEM_MC_REF_MXCSR(pfMxcsr);
5025 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5026 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5027 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5028 IEM_MC_IF_MXCSR_XCPT_PENDING()
5029 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5030 IEM_MC_ELSE()
5031 IEM_MC_COMMIT_EFLAGS(fEFlags);
5032 IEM_MC_ENDIF();
5033
5034 IEM_MC_ADVANCE_RIP();
5035 IEM_MC_END();
5036 }
5037 else
5038 {
5039 /*
5040 * Register, memory.
5041 */
5042 IEM_MC_BEGIN(4, 3);
5043 IEM_MC_LOCAL(uint32_t, fEFlags);
5044 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5045 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5046 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5047 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5048 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5050
5051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5053 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5054 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5055
5056 IEM_MC_PREPARE_SSE_USAGE();
5057 IEM_MC_FETCH_EFLAGS(fEFlags);
5058 IEM_MC_REF_MXCSR(pfMxcsr);
5059 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5060 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5061 IEM_MC_IF_MXCSR_XCPT_PENDING()
5062 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5063 IEM_MC_ELSE()
5064 IEM_MC_COMMIT_EFLAGS(fEFlags);
5065 IEM_MC_ENDIF();
5066
5067 IEM_MC_ADVANCE_RIP();
5068 IEM_MC_END();
5069 }
5070 return VINF_SUCCESS;
5071}
5072
5073
5074/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5075FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5076{
5077 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5079 if (IEM_IS_MODRM_REG_MODE(bRm))
5080 {
5081 /*
5082 * Register, register.
5083 */
5084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5085 IEM_MC_BEGIN(4, 1);
5086 IEM_MC_LOCAL(uint32_t, fEFlags);
5087 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5088 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5089 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5090 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5091 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5092 IEM_MC_PREPARE_SSE_USAGE();
5093 IEM_MC_FETCH_EFLAGS(fEFlags);
5094 IEM_MC_REF_MXCSR(pfMxcsr);
5095 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5096 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5097 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5098 IEM_MC_IF_MXCSR_XCPT_PENDING()
5099 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5100 IEM_MC_ELSE()
5101 IEM_MC_COMMIT_EFLAGS(fEFlags);
5102 IEM_MC_ENDIF();
5103
5104 IEM_MC_ADVANCE_RIP();
5105 IEM_MC_END();
5106 }
5107 else
5108 {
5109 /*
5110 * Register, memory.
5111 */
5112 IEM_MC_BEGIN(4, 3);
5113 IEM_MC_LOCAL(uint32_t, fEFlags);
5114 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5115 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5116 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5117 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5118 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5120
5121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5123 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5124 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5125
5126 IEM_MC_PREPARE_SSE_USAGE();
5127 IEM_MC_FETCH_EFLAGS(fEFlags);
5128 IEM_MC_REF_MXCSR(pfMxcsr);
5129 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5130 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5131 IEM_MC_IF_MXCSR_XCPT_PENDING()
5132 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5133 IEM_MC_ELSE()
5134 IEM_MC_COMMIT_EFLAGS(fEFlags);
5135 IEM_MC_ENDIF();
5136
5137 IEM_MC_ADVANCE_RIP();
5138 IEM_MC_END();
5139 }
5140 return VINF_SUCCESS;
5141}
5142
5143
5144/* Opcode 0xf3 0x0f 0x2f - invalid */
5145/* Opcode 0xf2 0x0f 0x2f - invalid */
5146
5147/** Opcode 0x0f 0x30. */
5148FNIEMOP_DEF(iemOp_wrmsr)
5149{
5150 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
5153}
5154
5155
5156/** Opcode 0x0f 0x31. */
5157FNIEMOP_DEF(iemOp_rdtsc)
5158{
5159 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
5162}
5163
5164
5165/** Opcode 0x0f 0x32. */
5166FNIEMOP_DEF(iemOp_rdmsr)
5167{
5168 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
5171}
5172
5173
5174/** Opcode 0x0f 0x33. */
5175FNIEMOP_DEF(iemOp_rdpmc)
5176{
5177 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
5180}
5181
5182
5183/** Opcode 0x0f 0x34. */
5184FNIEMOP_DEF(iemOp_sysenter)
5185{
5186 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
5189}
5190
5191/** Opcode 0x0f 0x35. */
5192FNIEMOP_DEF(iemOp_sysexit)
5193{
5194 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5196 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5197}
5198
5199/** Opcode 0x0f 0x37. */
5200FNIEMOP_STUB(iemOp_getsec);
5201
5202
5203/** Opcode 0x0f 0x38. */
5204FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5205{
5206#ifdef IEM_WITH_THREE_0F_38
5207 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5208 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5209#else
5210 IEMOP_BITCH_ABOUT_STUB();
5211 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5212#endif
5213}
5214
5215
5216/** Opcode 0x0f 0x3a. */
5217FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5218{
5219#ifdef IEM_WITH_THREE_0F_3A
5220 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5221 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5222#else
5223 IEMOP_BITCH_ABOUT_STUB();
5224 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5225#endif
5226}
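
/*
 * Both escapes above assume the three-byte maps store four function pointers
 * per opcode byte, selected by the last SIMD prefix seen (idxPrefix: 0 = no
 * prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2). So, roughly:
 *
 *      // 0x66 0x0f 0x38 0x00 (pshufb Vx,Wx) -> entry 0x00 * 4 + 1
 *      // 0x0f 0x3a 0x0f (palignr) with no prefix -> entry 0x0f * 4 + 0
 */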
5227
5228
5229/**
5230 * Implements a conditional move.
5231 *
5232 * Wish there was an obvious way to do this where we could share and reduce
5233 * code bloat.
5234 *
5235 * @param a_Cnd The conditional "microcode" operation.
5236 */
5237#define CMOV_X(a_Cnd) \
5238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5239 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5240 { \
5241 switch (pVCpu->iem.s.enmEffOpSize) \
5242 { \
5243 case IEMMODE_16BIT: \
5244 IEM_MC_BEGIN(0, 1); \
5245 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5246 a_Cnd { \
5247 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5248 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5249 } IEM_MC_ENDIF(); \
5250 IEM_MC_ADVANCE_RIP(); \
5251 IEM_MC_END(); \
5252 return VINF_SUCCESS; \
5253 \
5254 case IEMMODE_32BIT: \
5255 IEM_MC_BEGIN(0, 1); \
5256 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5257 a_Cnd { \
5258 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5259 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5260 } IEM_MC_ELSE() { \
5261 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5262 } IEM_MC_ENDIF(); \
5263 IEM_MC_ADVANCE_RIP(); \
5264 IEM_MC_END(); \
5265 return VINF_SUCCESS; \
5266 \
5267 case IEMMODE_64BIT: \
5268 IEM_MC_BEGIN(0, 1); \
5269 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5270 a_Cnd { \
5271 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5272 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5273 } IEM_MC_ENDIF(); \
5274 IEM_MC_ADVANCE_RIP(); \
5275 IEM_MC_END(); \
5276 return VINF_SUCCESS; \
5277 \
5278 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5279 } \
5280 } \
5281 else \
5282 { \
5283 switch (pVCpu->iem.s.enmEffOpSize) \
5284 { \
5285 case IEMMODE_16BIT: \
5286 IEM_MC_BEGIN(0, 2); \
5287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5288 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5290 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5291 a_Cnd { \
5292 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5293 } IEM_MC_ENDIF(); \
5294 IEM_MC_ADVANCE_RIP(); \
5295 IEM_MC_END(); \
5296 return VINF_SUCCESS; \
5297 \
5298 case IEMMODE_32BIT: \
5299 IEM_MC_BEGIN(0, 2); \
5300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5301 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5303 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5304 a_Cnd { \
5305 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5306 } IEM_MC_ELSE() { \
5307 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5308 } IEM_MC_ENDIF(); \
5309 IEM_MC_ADVANCE_RIP(); \
5310 IEM_MC_END(); \
5311 return VINF_SUCCESS; \
5312 \
5313 case IEMMODE_64BIT: \
5314 IEM_MC_BEGIN(0, 2); \
5315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5316 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5318 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5319 a_Cnd { \
5320 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5321 } IEM_MC_ENDIF(); \
5322 IEM_MC_ADVANCE_RIP(); \
5323 IEM_MC_END(); \
5324 return VINF_SUCCESS; \
5325 \
5326 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5327 } \
5328 } do {} while (0)
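
/*
 * Two details of CMOV_X worth noting: the memory forms fetch the source
 * unconditionally, exactly like real hardware, so a bad memory operand faults
 * even when the condition is false; and the 32-bit forms zero the high half
 * of the destination in 64-bit mode whether or not the move happens, which is
 * what the IEM_MC_CLEAR_HIGH_GREG_U64() else-branches implement. Roughly:
 *
 *      uint64_t RefCmov32(uint64_t uDstOld, uint32_t uSrc, bool fCond)
 *      {
 *          return fCond ? uSrc : (uint32_t)uDstOld;    // high 32 bits always zero
 *      }
 */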
5329
5330
5331
5332/** Opcode 0x0f 0x40. */
5333FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5334{
5335 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5336 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5337}
5338
5339
5340/** Opcode 0x0f 0x41. */
5341FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5342{
5343 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5344 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5345}
5346
5347
5348/** Opcode 0x0f 0x42. */
5349FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5350{
5351 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5352 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5353}
5354
5355
5356/** Opcode 0x0f 0x43. */
5357FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5358{
5359 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5360 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5361}
5362
5363
5364/** Opcode 0x0f 0x44. */
5365FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5366{
5367 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5368 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5369}
5370
5371
5372/** Opcode 0x0f 0x45. */
5373FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5374{
5375 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5376 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5377}
5378
5379
5380/** Opcode 0x0f 0x46. */
5381FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5382{
5383 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5384 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5385}
5386
5387
5388/** Opcode 0x0f 0x47. */
5389FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5390{
5391 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5392 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5393}
5394
5395
5396/** Opcode 0x0f 0x48. */
5397FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5398{
5399 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5400 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5401}
5402
5403
5404/** Opcode 0x0f 0x49. */
5405FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5406{
5407 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5408 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5409}
5410
5411
5412/** Opcode 0x0f 0x4a. */
5413FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5414{
5415 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5416 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5417}
5418
5419
5420/** Opcode 0x0f 0x4b. */
5421FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5422{
5423 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5424 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5425}
5426
5427
5428/** Opcode 0x0f 0x4c. */
5429FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5430{
5431 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5432 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5433}
5434
5435
5436/** Opcode 0x0f 0x4d. */
5437FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5438{
5439 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5440 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5441}
5442
5443
5444/** Opcode 0x0f 0x4e. */
5445FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5446{
5447 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5448 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5449}
5450
5451
5452/** Opcode 0x0f 0x4f. */
5453FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5454{
5455 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5456 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5457}
5458
5459#undef CMOV_X
5460
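/*
 * Note: movmskps gathers the sign bit of each of the four packed singles
 * into bits 3:0 of the destination (movmskpd below does the same for the
 * two doubles, using bits 1:0), which is why an 8-bit worker result
 * suffices and is then stored as a zero-extending 32-bit GREG write.
 */
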
5461/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5462FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5463{
5464 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5466 if (IEM_IS_MODRM_REG_MODE(bRm))
5467 {
5468 /*
5469 * Register, register.
5470 */
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5472 IEM_MC_BEGIN(2, 1);
5473 IEM_MC_LOCAL(uint8_t, u8Dst);
5474 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5475 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5477 IEM_MC_PREPARE_SSE_USAGE();
5478 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5479 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5481 IEM_MC_ADVANCE_RIP();
5482 IEM_MC_END();
5483 return VINF_SUCCESS;
5484 }
5485
5486 /* No memory operand. */
5487 return IEMOP_RAISE_INVALID_OPCODE();
5488}
5489
5490
5491/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5492FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5493{
5494 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0); /** @todo */
5495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5496 if (IEM_IS_MODRM_REG_MODE(bRm))
5497 {
5498 /*
5499 * Register, register.
5500 */
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 IEM_MC_BEGIN(2, 1);
5503 IEM_MC_LOCAL(uint8_t, u8Dst);
5504 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5505 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5506 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5507 IEM_MC_PREPARE_SSE_USAGE();
5508 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5509 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5510 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 return VINF_SUCCESS;
5514 }
5515
5516 /* No memory operand. */
5517 return IEMOP_RAISE_INVALID_OPCODE();
5519}
5520
5521
5522/* Opcode 0xf3 0x0f 0x50 - invalid */
5523/* Opcode 0xf2 0x0f 0x50 - invalid */
5524
5525
5526/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5527FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5528{
5529 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5530 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5531}
5532
5533
5534/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5535FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5536{
5537 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5538 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5539}
5540
5541
5542/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5543FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5544{
5545 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5546 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5547}
5548
5549
5550/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5551FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5552{
5553 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5554 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5555}
5556
5557
5558/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5559FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
5560/* Opcode 0x66 0x0f 0x52 - invalid */
5561/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5562FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
5563/* Opcode 0xf2 0x0f 0x52 - invalid */
5564
5565/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5566FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5567/* Opcode 0x66 0x0f 0x53 - invalid */
5568/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5569FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5570/* Opcode 0xf2 0x0f 0x53 - invalid */
5571
5572
5573/** Opcode 0x0f 0x54 - andps Vps, Wps */
5574FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5575{
5576 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5578}
5579
5580
5581/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5582FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5583{
5584 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5586}
5587
5588
5589/* Opcode 0xf3 0x0f 0x54 - invalid */
5590/* Opcode 0xf2 0x0f 0x54 - invalid */
5591
5592
5593/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5594FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5595{
5596 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5597 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5598}
5599
5600
5601/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5602FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5603{
5604 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5605 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5606}
5607
5608
5609/* Opcode 0xf3 0x0f 0x55 - invalid */
5610/* Opcode 0xf2 0x0f 0x55 - invalid */
5611
5612
5613/** Opcode 0x0f 0x56 - orps Vps, Wps */
5614FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5615{
5616 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5617 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5618}
5619
5620
5621/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5622FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5623{
5624 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5625 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5626}
5627
5628
5629/* Opcode 0xf3 0x0f 0x56 - invalid */
5630/* Opcode 0xf2 0x0f 0x56 - invalid */
5631
5632
5633/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5634FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5635{
5636 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5638}
5639
5640
5641/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5642FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5643{
5644 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5645 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5646}
5647
5648
5649/* Opcode 0xf3 0x0f 0x57 - invalid */
5650/* Opcode 0xf2 0x0f 0x57 - invalid */
5651
5652/** Opcode 0x0f 0x58 - addps Vps, Wps */
5653FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5654{
5655 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5656 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5657}
5658
5659
5660/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5661FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5662{
5663 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5664 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5665}
5666
5667
5668/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5669FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5670{
5671 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5672 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5673}
5674
5675
5676/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5677FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5678{
5679 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5680 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5681}
5682
5683
5684/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5685FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5686{
5687 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5688 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5689}
5690
5691
5692/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5693FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5694{
5695 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5696 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5697}
5698
5699
5700/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5701FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5702{
5703 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5704 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5705}
5706
5707
5708/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5709FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5710{
5711 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5712 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5713}
5714
5715
5716/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5717FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5718{
5719 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5720 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5721}
5722
5723
5724/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5725FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5726{
5727 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5728 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5729}
5730
5731
5732/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5733FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5734{
5735 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5736 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5737}
5738
5739
5740/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5741FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5742{
5743 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5744 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5745}
5746
5747
5748/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5749FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5750{
5751 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5752 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5753}
5754
5755
5756/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5757FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5758{
5759 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5760 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5761}
5762
5763
5764/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5765FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5766{
5767 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5768 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5769}
5770
5771
5772/* Opcode 0xf2 0x0f 0x5b - invalid */
5773
5774
5775/** Opcode 0x0f 0x5c - subps Vps, Wps */
5776FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5777{
5778 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5779 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5780}
5781
5782
5783/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5784FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5785{
5786 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5787 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5788}
5789
5790
5791/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5792FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5793{
5794 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5795 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5796}
5797
5798
5799/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5800FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5801{
5802 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5803 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5804}
5805
5806
5807/** Opcode 0x0f 0x5d - minps Vps, Wps */
5808FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5809{
5810 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5811 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5812}
5813
5814
5815/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5816FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5817{
5818 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5819 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5820}
5821
5822
5823/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5824FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5825{
5826 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5827 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5828}
5829
5830
5831/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5832FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5833{
5834 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5835 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5836}
5837
5838
5839/** Opcode 0x0f 0x5e - divps Vps, Wps */
5840FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5841{
5842 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5843 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5844}
5845
5846
5847/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5848FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5849{
5850 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5851 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5852}
5853
5854
5855/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5856FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5857{
5858 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5859 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5860}
5861
5862
5863/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5864FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5865{
5866 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5867 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5868}
5869
5870
5871/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5872FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5873{
5874 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5875 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5876}
5877
5878
5879/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5880FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5881{
5882 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5883 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5884}
5885
5886
5887/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5888FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5889{
5890 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5891 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5892}
5893
5894
5895/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5896FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5897{
5898 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5899 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5900}
5901
5902
5903/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5904FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5905{
5906 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5907 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5908}
5909
5910
5911/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5912FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5913{
5914 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5915 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5916}
5917
5918
5919/* Opcode 0xf3 0x0f 0x60 - invalid */
5920
5921
5922/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5923FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5924{
5925 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5926 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5927 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5928}
5929
5930
5931/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5932FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5933{
5934 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5936}
5937
5938
5939/* Opcode 0xf3 0x0f 0x61 - invalid */
5940
5941
5942/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5943FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5944{
5945 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5946 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5947}
5948
5949
5950/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5951FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5952{
5953 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5954 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5955}
5956
5957
5958/* Opcode 0xf3 0x0f 0x62 - invalid */
5959
5960
5961
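/*
 * Note: The pack instructions narrow each source element to half its width,
 * with signed saturation for packsswb/packssdw and unsigned saturation for
 * packuswb; the low half of the result comes from the destination operand
 * and the high half from the source operand.
 */
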
5962/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5963FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5964{
5965 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5966 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5967}
5968
5969
5970/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5971FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5972{
5973 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5974 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5975}
5976
5977
5978/* Opcode 0xf3 0x0f 0x63 - invalid */
5979
5980
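/*
 * Note: The packed compares produce element-wise masks rather than flags:
 * each destination element is set to all ones when the signed greater-than
 * comparison holds and to all zeroes otherwise, so EFLAGS is not touched.
 */
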
5981/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5982FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5983{
5984 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5985 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5986}
5987
5988
5989/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5990FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5991{
5992 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5993 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5994}
5995
5996
5997/* Opcode 0xf3 0x0f 0x64 - invalid */
5998
5999
6000/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6001FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6002{
6003 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6004 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6005}
6006
6007
6008/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6009FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6010{
6011 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6012 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6013}
6014
6015
6016/* Opcode 0xf3 0x0f 0x65 - invalid */
6017
6018
6019/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6020FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6021{
6022 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6023 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6024}
6025
6026
6027/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6028FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6029{
6030 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6031 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6032}
6033
6034
6035/* Opcode 0xf3 0x0f 0x66 - invalid */
6036
6037
6038/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6039FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6040{
6041 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6042 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6043}
6044
6045
6046/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6047FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6048{
6049 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6050 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6051}
6052
6053
6054/* Opcode 0xf3 0x0f 0x67 - invalid */
6055
6056
6057/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6058 * @note Intel and AMD both use Qd for the second parameter; however, they
6059 * both list it as an mmX/mem64 operand and Intel describes it as being
6060 * loaded as a qword, so it should be Qq, shouldn't it? */
6061FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6062{
6063 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6064 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6065}
6066
6067
6068/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6069FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6070{
6071 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6072 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6073}
6074
6075
6076/* Opcode 0xf3 0x0f 0x68 - invalid */
6077
6078
6079/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6080 * @note Intel and AMD both use Qd for the second parameter; however, they
6081 * both list it as an mmX/mem64 operand and Intel describes it as being
6082 * loaded as a qword, so it should be Qq, shouldn't it? */
6083FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6084{
6085 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6086 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6087}
6088
6089
6090/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6091FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6092{
6093 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6094 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6096}
6097
6098
6099/* Opcode 0xf3 0x0f 0x69 - invalid */
6100
6101
6102/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6103 * @note Intel and AMD both use Qd for the second parameter; however, they
6104 * both list it as an mmX/mem64 operand and Intel describes it as being
6105 * loaded as a qword, so it should be Qq, shouldn't it? */
6106FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6107{
6108 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6109 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6110}
6111
6112
6113/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6114FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6115{
6116 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6117 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6118}
6119
6120
6121/* Opcode 0xf3 0x0f 0x6a - invalid */
6122
6123
6124/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6125FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6126{
6127 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6128 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6129}
6130
6131
6132/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6133FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6134{
6135 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6136 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6137}
6138
6139
6140/* Opcode 0xf3 0x0f 0x6b - invalid */
6141
6142
6143/* Opcode 0x0f 0x6c - invalid */
6144
6145
6146/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6147FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6148{
6149 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6150 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6151}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6c - invalid */
6155/* Opcode 0xf2 0x0f 0x6c - invalid */
6156
6157
6158/* Opcode 0x0f 0x6d - invalid */
6159
6160
6161/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6162FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6163{
6164 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6166}
6167
6168
6169/* Opcode 0xf3 0x0f 0x6d - invalid */
6170
6171
6172FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6173{
6174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6175 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6176 {
6177 /**
6178 * @opcode 0x6e
6179 * @opcodesub rex.w=1
6180 * @oppfx none
6181 * @opcpuid mmx
6182 * @opgroup og_mmx_datamove
6183 * @opxcpttype 5
6184 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6185 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6186 */
6187 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6188 if (IEM_IS_MODRM_REG_MODE(bRm))
6189 {
6190 /* MMX, greg64 */
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 IEM_MC_BEGIN(0, 1);
6193 IEM_MC_LOCAL(uint64_t, u64Tmp);
6194
6195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6196 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6197 IEM_MC_FPU_TO_MMX_MODE();
6198
6199 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6200 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6201
6202 IEM_MC_ADVANCE_RIP();
6203 IEM_MC_END();
6204 }
6205 else
6206 {
6207 /* MMX, [mem64] */
6208 IEM_MC_BEGIN(0, 2);
6209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6210 IEM_MC_LOCAL(uint64_t, u64Tmp);
6211
6212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6215 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6216 IEM_MC_FPU_TO_MMX_MODE();
6217
6218 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6219 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6220
6221 IEM_MC_ADVANCE_RIP();
6222 IEM_MC_END();
6223 }
6224 }
6225 else
6226 {
6227 /**
6228 * @opdone
6229 * @opcode 0x6e
6230 * @opcodesub rex.w=0
6231 * @oppfx none
6232 * @opcpuid mmx
6233 * @opgroup og_mmx_datamove
6234 * @opxcpttype 5
6235 * @opfunction iemOp_movd_q_Pd_Ey
6236 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6237 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6238 */
6239 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6240 if (IEM_IS_MODRM_REG_MODE(bRm))
6241 {
6242 /* MMX, greg */
6243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6244 IEM_MC_BEGIN(0, 1);
6245 IEM_MC_LOCAL(uint64_t, u64Tmp);
6246
6247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6248 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6249 IEM_MC_FPU_TO_MMX_MODE();
6250
6251 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6252 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6253
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /* MMX, [mem] */
6260 IEM_MC_BEGIN(0, 2);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6262 IEM_MC_LOCAL(uint32_t, u32Tmp);
6263
6264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6267 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6271 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6272
6273 IEM_MC_ADVANCE_RIP();
6274 IEM_MC_END();
6275 }
6276 }
6277 return VINF_SUCCESS;
6278}
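
/*
 * Note: Unlike the SSE2 form below, both MOVD/MOVQ paths above switch the
 * FPU into MMX mode (IEM_MC_FPU_TO_MMX_MODE), which is what the ftw=0xff
 * part of the @optest annotations checks for.
 */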
6279
6280FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6281{
6282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6284 {
6285 /**
6286 * @opcode 0x6e
6287 * @opcodesub rex.w=1
6288 * @oppfx 0x66
6289 * @opcpuid sse2
6290 * @opgroup og_sse2_simdint_datamove
6291 * @opxcpttype 5
6292 * @optest 64-bit / op1=1 op2=2 -> op1=2
6293 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6294 */
6295 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6296 if (IEM_IS_MODRM_REG_MODE(bRm))
6297 {
6298 /* XMM, greg64 */
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_BEGIN(0, 1);
6301 IEM_MC_LOCAL(uint64_t, u64Tmp);
6302
6303 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6305
6306 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6307 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6308
6309 IEM_MC_ADVANCE_RIP();
6310 IEM_MC_END();
6311 }
6312 else
6313 {
6314 /* XMM, [mem64] */
6315 IEM_MC_BEGIN(0, 2);
6316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6317 IEM_MC_LOCAL(uint64_t, u64Tmp);
6318
6319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6323
6324 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6325 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6326
6327 IEM_MC_ADVANCE_RIP();
6328 IEM_MC_END();
6329 }
6330 }
6331 else
6332 {
6333 /**
6334 * @opdone
6335 * @opcode 0x6e
6336 * @opcodesub rex.w=0
6337 * @oppfx 0x66
6338 * @opcpuid sse2
6339 * @opgroup og_sse2_simdint_datamove
6340 * @opxcpttype 5
6341 * @opfunction iemOp_movd_q_Vy_Ey
6342 * @optest op1=1 op2=2 -> op1=2
6343 * @optest op1=0 op2=-42 -> op1=-42
6344 */
6345 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6346 if (IEM_IS_MODRM_REG_MODE(bRm))
6347 {
6348 /* XMM, greg32 */
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 IEM_MC_BEGIN(0, 1);
6351 IEM_MC_LOCAL(uint32_t, u32Tmp);
6352
6353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6355
6356 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6357 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6358
6359 IEM_MC_ADVANCE_RIP();
6360 IEM_MC_END();
6361 }
6362 else
6363 {
6364 /* XMM, [mem32] */
6365 IEM_MC_BEGIN(0, 2);
6366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6367 IEM_MC_LOCAL(uint32_t, u32Tmp);
6368
6369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6371 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6372 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6373
6374 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6375 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6376
6377 IEM_MC_ADVANCE_RIP();
6378 IEM_MC_END();
6379 }
6380 }
6381 return VINF_SUCCESS;
6382}
6383
6384/* Opcode 0xf3 0x0f 0x6e - invalid */
6385
6386
6387/**
6388 * @opcode 0x6f
6389 * @oppfx none
6390 * @opcpuid mmx
6391 * @opgroup og_mmx_datamove
6392 * @opxcpttype 5
6393 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6394 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6395 */
6396FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6397{
6398 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6400 if (IEM_IS_MODRM_REG_MODE(bRm))
6401 {
6402 /*
6403 * Register, register.
6404 */
6405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6406 IEM_MC_BEGIN(0, 1);
6407 IEM_MC_LOCAL(uint64_t, u64Tmp);
6408
6409 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6410 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6411 IEM_MC_FPU_TO_MMX_MODE();
6412
6413 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6414 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6415
6416 IEM_MC_ADVANCE_RIP();
6417 IEM_MC_END();
6418 }
6419 else
6420 {
6421 /*
6422 * Register, memory.
6423 */
6424 IEM_MC_BEGIN(0, 2);
6425 IEM_MC_LOCAL(uint64_t, u64Tmp);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6427
6428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6430 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6431 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6432 IEM_MC_FPU_TO_MMX_MODE();
6433
6434 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6435 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6436
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 }
6440 return VINF_SUCCESS;
6441}
6442
6443/**
6444 * @opcode 0x6f
6445 * @oppfx 0x66
6446 * @opcpuid sse2
6447 * @opgroup og_sse2_simdint_datamove
6448 * @opxcpttype 1
6449 * @optest op1=1 op2=2 -> op1=2
6450 * @optest op1=0 op2=-42 -> op1=-42
6451 */
6452FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6453{
6454 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6456 if (IEM_IS_MODRM_REG_MODE(bRm))
6457 {
6458 /*
6459 * Register, register.
6460 */
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_BEGIN(0, 0);
6463
6464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6466
6467 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6468 IEM_GET_MODRM_RM(pVCpu, bRm));
6469 IEM_MC_ADVANCE_RIP();
6470 IEM_MC_END();
6471 }
6472 else
6473 {
6474 /*
6475 * Register, memory.
6476 */
6477 IEM_MC_BEGIN(0, 2);
6478 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6480
6481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6483 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6485
6486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6487 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6488
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 }
6492 return VINF_SUCCESS;
6493}
6494
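/*
 * Note: The movdqu form below differs from movdqa above only in the memory
 * path: it uses the unaligned IEM_MC_FETCH_MEM_U128 instead of the
 * _ALIGN_SSE variant, so a misaligned operand does not fault. The
 * register-to-register encoding is identical.
 */
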
6495/**
6496 * @opcode 0x6f
6497 * @oppfx 0xf3
6498 * @opcpuid sse2
6499 * @opgroup og_sse2_simdint_datamove
6500 * @opxcpttype 4UA
6501 * @optest op1=1 op2=2 -> op1=2
6502 * @optest op1=0 op2=-42 -> op1=-42
6503 */
6504FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6505{
6506 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6508 if (IEM_IS_MODRM_REG_MODE(bRm))
6509 {
6510 /*
6511 * Register, register.
6512 */
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514 IEM_MC_BEGIN(0, 0);
6515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6517 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6518 IEM_GET_MODRM_RM(pVCpu, bRm));
6519 IEM_MC_ADVANCE_RIP();
6520 IEM_MC_END();
6521 }
6522 else
6523 {
6524 /*
6525 * Register, memory.
6526 */
6527 IEM_MC_BEGIN(0, 2);
6528 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6530
6531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6535 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6536 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6537
6538 IEM_MC_ADVANCE_RIP();
6539 IEM_MC_END();
6540 }
6541 return VINF_SUCCESS;
6542}
6543
6544
6545/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6546FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6547{
6548 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6550 if (IEM_IS_MODRM_REG_MODE(bRm))
6551 {
6552 /*
6553 * Register, register.
6554 */
6555 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557
6558 IEM_MC_BEGIN(3, 0);
6559 IEM_MC_ARG(uint64_t *, pDst, 0);
6560 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6561 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6563 IEM_MC_PREPARE_FPU_USAGE();
6564 IEM_MC_FPU_TO_MMX_MODE();
6565
6566 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6567 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6568 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6569 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6570
6571 IEM_MC_ADVANCE_RIP();
6572 IEM_MC_END();
6573 }
6574 else
6575 {
6576 /*
6577 * Register, memory.
6578 */
6579 IEM_MC_BEGIN(3, 2);
6580 IEM_MC_ARG(uint64_t *, pDst, 0);
6581 IEM_MC_LOCAL(uint64_t, uSrc);
6582 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6584
6585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6586 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6587 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6590 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6591
6592 IEM_MC_PREPARE_FPU_USAGE();
6593 IEM_MC_FPU_TO_MMX_MODE();
6594
6595 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6596 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
6597 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6598
6599 IEM_MC_ADVANCE_RIP();
6600 IEM_MC_END();
6601 }
6602 return VINF_SUCCESS;
6603}
6604
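/*
 * Note: For all these shuffles the immediate is four 2-bit source selectors,
 * e.g. for pshufd destination dword i is source dword (bEvil >> (i * 2)) & 3;
 * pshufhw/pshuflw apply the same scheme to the words of the high/low qword
 * and copy the other qword unchanged. The actual element selection lives in
 * the assembly workers.
 */
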
6605
6606/**
6607 * Common worker for SSE2 instructions on the forms:
6608 * pshufd xmm1, xmm2/mem128, imm8
6609 * pshufhw xmm1, xmm2/mem128, imm8
6610 * pshuflw xmm1, xmm2/mem128, imm8
6611 *
6612 * Proper alignment of the 128-bit operand is enforced.
6613 * Exceptions type 4. SSE2 cpuid checks.
6614 */
6615FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6616{
6617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6618 if (IEM_IS_MODRM_REG_MODE(bRm))
6619 {
6620 /*
6621 * Register, register.
6622 */
6623 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6625
6626 IEM_MC_BEGIN(3, 0);
6627 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6628 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6629 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6630 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6631 IEM_MC_PREPARE_SSE_USAGE();
6632 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6633 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6634 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 }
6638 else
6639 {
6640 /*
6641 * Register, memory.
6642 */
6643 IEM_MC_BEGIN(3, 2);
6644 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6645 IEM_MC_LOCAL(RTUINT128U, uSrc);
6646 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6648
6649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6650 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
6651 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
6652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6654
6655 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6656 IEM_MC_PREPARE_SSE_USAGE();
6657 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6658 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
6659
6660 IEM_MC_ADVANCE_RIP();
6661 IEM_MC_END();
6662 }
6663 return VINF_SUCCESS;
6664}
6665
6666
6667/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6668FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6669{
6670 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6671 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6672}
6673
6674
6675/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6676FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6677{
6678 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6679 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6680}
6681
6682
6683/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6684FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6685{
6686 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6687 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6688}
6689
6690
6691/**
6692 * Common worker for MMX instructions of the form:
6693 * psrlw mm, imm8
6694 * psraw mm, imm8
6695 * psllw mm, imm8
6696 * psrld mm, imm8
6697 * psrad mm, imm8
6698 * pslld mm, imm8
6699 * psrlq mm, imm8
6700 * psllq mm, imm8
6701 *
6702 */
6703FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6704{
6705 if (IEM_IS_MODRM_REG_MODE(bRm))
6706 {
6707 /*
6708 * Register, immediate.
6709 */
6710 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712
6713 IEM_MC_BEGIN(2, 0);
6714 IEM_MC_ARG(uint64_t *, pDst, 0);
6715 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6716 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6717 IEM_MC_PREPARE_FPU_USAGE();
6718 IEM_MC_FPU_TO_MMX_MODE();
6719
6720 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6721 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6722 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6723
6724 IEM_MC_ADVANCE_RIP();
6725 IEM_MC_END();
6726 }
6727 else
6728 {
6729 /*
6730 * Register, memory not supported.
6731 */
6732 /// @todo Caller already enforced register mode?!
6733 }
6734 return VINF_SUCCESS;
6735}
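
/*
 * Note: The memory branch above (and in the SSE2 worker below) is
 * unreachable in practice: the group 12/13/14 dispatchers further down only
 * invoke these shift workers for register-form ModR/M bytes and route
 * everything else to iemOp_InvalidWithRMNeedImm8.
 */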
6736
6737
6738/**
6739 * Common worker for SSE2 instructions of the form:
6740 * psrlw xmm, imm8
6741 * psraw xmm, imm8
6742 * psllw xmm, imm8
6743 * psrld xmm, imm8
6744 * psrad xmm, imm8
6745 * pslld xmm, imm8
6746 * psrlq xmm, imm8
6747 * psllq xmm, imm8
6748 *
6749 */
6750FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6751{
6752 if (IEM_IS_MODRM_REG_MODE(bRm))
6753 {
6754 /*
6755 * Register, immediate.
6756 */
6757 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759
6760 IEM_MC_BEGIN(2, 0);
6761 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6762 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6763 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6764 IEM_MC_PREPARE_SSE_USAGE();
6765 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6766 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6767 IEM_MC_ADVANCE_RIP();
6768 IEM_MC_END();
6769 }
6770 else
6771 {
6772 /*
6773 * Register, memory not supported.
6774 */
6775 /// @todo Caller already enforced register mode?!
6776 }
6777 return VINF_SUCCESS;
6778}
6779
6780
6781/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6782FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6783{
6784// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6785 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6786}
6787
6788
6789/** Opcode 0x66 0x0f 0x71 11/2. */
6790FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6791{
6792// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6793 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6794}
6795
6796
6797/** Opcode 0x0f 0x71 11/4. */
6798FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6799{
6800// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6801 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6802}
6803
6804
6805/** Opcode 0x66 0x0f 0x71 11/4. */
6806FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6807{
6808// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6809 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6810}
6811
6812
6813/** Opcode 0x0f 0x71 11/6. */
6814FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6815{
6816// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6817 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6818}
6819
6820
6821/** Opcode 0x66 0x0f 0x71 11/6. */
6822FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6823{
6824// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6825 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6826}
6827
6828
6829/**
6830 * Group 12 jump table for register variant.
6831 */
6832IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6833{
6834 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6835 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6836 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6837 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6838 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6839 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6842};
6843AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
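
/*
 * Note: These group tables are indexed by ModR/M.reg * 4 + idxPrefix, with
 * the prefix index being 0 for no prefix (MMX), 1 for 0x66 (SSE), and 2/3
 * for 0xf3/0xf2 (both invalid here), hence the four entries per /r row.
 */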
6844
6845
6846/** Opcode 0x0f 0x71. */
6847FNIEMOP_DEF(iemOp_Grp12)
6848{
6849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6850 if (IEM_IS_MODRM_REG_MODE(bRm))
6851 /* register, register */
6852 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6853 + pVCpu->iem.s.idxPrefix], bRm);
6854 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6855}
6856
6857
6858/** Opcode 0x0f 0x72 11/2. */
6859FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6860{
6861// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6862 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6863}
6864
6865
6866/** Opcode 0x66 0x0f 0x72 11/2. */
6867FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6868{
6869// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6870 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6871}
6872
6873
6874/** Opcode 0x0f 0x72 11/4. */
6875FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6876{
6877// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6878 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6879}
6880
6881
6882/** Opcode 0x66 0x0f 0x72 11/4. */
6883FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6884{
6885// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6886 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6887}
6888
6889
6890/** Opcode 0x0f 0x72 11/6. */
6891FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6892{
6893// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6894 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6895}
6896
6897/** Opcode 0x66 0x0f 0x72 11/6. */
6898FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6899{
6900// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6901 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6902}
6903
6904
6905/**
6906 * Group 13 jump table for register variant.
6907 */
6908IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6909{
6910 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6911 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6912 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6913 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6914 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6915 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6916 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6917 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6918};
6919AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6920
6921/** Opcode 0x0f 0x72. */
6922FNIEMOP_DEF(iemOp_Grp13)
6923{
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 if (IEM_IS_MODRM_REG_MODE(bRm))
6926 /* register, register */
6927 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6928 + pVCpu->iem.s.idxPrefix], bRm);
6929 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6930}
6931
6932
6933/** Opcode 0x0f 0x73 11/2. */
6934FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6935{
6936// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6937 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6938}
6939
6940
6941/** Opcode 0x66 0x0f 0x73 11/2. */
6942FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6943{
6944// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6945 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6946}
6947
6948
6949/** Opcode 0x66 0x0f 0x73 11/3. */
6950FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6951{
6952// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6953 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6954}
6955
6956
6957/** Opcode 0x0f 0x73 11/6. */
6958FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6959{
6960// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
6961 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6962}
6963
6964
6965/** Opcode 0x66 0x0f 0x73 11/6. */
6966FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6967{
6968// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6969 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6970}
6971
6972
6973/** Opcode 0x66 0x0f 0x73 11/7. */
6974FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6975{
6976// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
6977 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6978}
6979
6980/**
6981 * Group 14 jump table for register variant.
6982 */
6983IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6984{
6985 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6986 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6987 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6988 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6989 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6990 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6991 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6992 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6993};
6994AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6995
6996
6997/** Opcode 0x0f 0x73. */
6998FNIEMOP_DEF(iemOp_Grp14)
6999{
7000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7001 if (IEM_IS_MODRM_REG_MODE(bRm))
7002 /* register, register */
7003 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7004 + pVCpu->iem.s.idxPrefix], bRm);
7005 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7006}
7007
7008
7009/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7010FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7011{
7012 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7013 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7014}
7015
7016
7017/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7018FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7019{
7020 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7021 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7022}
7023
7024
7025/* Opcode 0xf3 0x0f 0x74 - invalid */
7026/* Opcode 0xf2 0x0f 0x74 - invalid */
7027
7028
7029/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7030FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7031{
7032 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7034}
7035
7036
7037/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7038FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7039{
7040 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7041 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7042}
7043
7044
7045/* Opcode 0xf3 0x0f 0x75 - invalid */
7046/* Opcode 0xf2 0x0f 0x75 - invalid */
7047
7048
7049/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7050FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7051{
7052 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7053 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7054}
7055
7056
7057/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7058FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7059{
7060 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7061 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7062}
7063
7064
7065/* Opcode 0xf3 0x0f 0x76 - invalid */
7066/* Opcode 0xf2 0x0f 0x76 - invalid */
7067
7068
7069/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7070FNIEMOP_DEF(iemOp_emms)
7071{
7072 IEMOP_MNEMONIC(emms, "emms");
7073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7074
7075 IEM_MC_BEGIN(0, 0);
7076 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7077 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7078 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7079 IEM_MC_FPU_FROM_MMX_MODE();
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 return VINF_SUCCESS;
7083}
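
/*
 * Note: Architecturally EMMS only has to mark the x87 register stack empty
 * again; IEM_MC_FPU_FROM_MMX_MODE above takes care of resetting the tag
 * word and leaving MMX mode, so no arithmetic worker is needed.
 */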
7084
7085/* Opcode 0x66 0x0f 0x77 - invalid */
7086/* Opcode 0xf3 0x0f 0x77 - invalid */
7087/* Opcode 0xf2 0x0f 0x77 - invalid */
7088
7089/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7090#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
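/*
 * Note: VMREAD reverses the usual Gv,Ev operand direction: the ModR/M reg
 * field (Gy) supplies the VMCS field encoding and the rm field is the
 * destination, which is why the code below fetches u64Enc/u32Enc from REG
 * and references or addresses the destination via RM.
 */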
7091FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7092{
7093 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7094 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7095 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7096 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7097
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 if (IEM_IS_MODRM_REG_MODE(bRm))
7100 {
7101 /*
7102 * Register, register.
7103 */
7104 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7105 if (enmEffOpSize == IEMMODE_64BIT)
7106 {
7107 IEM_MC_BEGIN(2, 0);
7108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7109 IEM_MC_ARG(uint64_t, u64Enc, 1);
7110 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7111 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7112 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7113 IEM_MC_END();
7114 }
7115 else
7116 {
7117 IEM_MC_BEGIN(2, 0);
7118 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7119 IEM_MC_ARG(uint32_t, u32Enc, 1);
7120 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7121 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7122 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
7123 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7124 IEM_MC_END();
7125 }
7126 }
7127 else
7128 {
7129 /*
7130 * Memory, register.
7131 */
7132 if (enmEffOpSize == IEMMODE_64BIT)
7133 {
7134 IEM_MC_BEGIN(3, 0);
7135 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7136 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7137 IEM_MC_ARG(uint64_t, u64Enc, 2);
7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7139 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7140 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7141 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7142 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7143 IEM_MC_END();
7144 }
7145 else
7146 {
7147 IEM_MC_BEGIN(3, 0);
7148 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7149 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7150 IEM_MC_ARG(uint32_t, u32Enc, 2);
7151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7152 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7153 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7154 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7155 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7156 IEM_MC_END();
7157 }
7158 }
7159 return VINF_SUCCESS;
7160}
7161#else
7162FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7163#endif
7164
7165/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7166FNIEMOP_STUB(iemOp_AmdGrp17);
7167/* Opcode 0xf3 0x0f 0x78 - invalid */
7168/* Opcode 0xf2 0x0f 0x78 - invalid */
7169
7170/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7171#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7172FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7173{
7174 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7175 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7176 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7177 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
7178
7179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7180 if (IEM_IS_MODRM_REG_MODE(bRm))
7181 {
7182 /*
7183 * Register, register.
7184 */
7185 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7186 if (enmEffOpSize == IEMMODE_64BIT)
7187 {
7188 IEM_MC_BEGIN(2, 0);
7189 IEM_MC_ARG(uint64_t, u64Val, 0);
7190 IEM_MC_ARG(uint64_t, u64Enc, 1);
7191 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7192 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7193 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
7194 IEM_MC_END();
7195 }
7196 else
7197 {
7198 IEM_MC_BEGIN(2, 0);
7199 IEM_MC_ARG(uint32_t, u32Val, 0);
7200 IEM_MC_ARG(uint32_t, u32Enc, 1);
7201 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7202 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7203 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
7204 IEM_MC_END();
7205 }
7206 }
7207 else
7208 {
7209 /*
7210 * Register, memory.
7211 */
7212 if (enmEffOpSize == IEMMODE_64BIT)
7213 {
7214 IEM_MC_BEGIN(3, 0);
7215 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7216 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7217 IEM_MC_ARG(uint64_t, u64Enc, 2);
7218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7219 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7220 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7221 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7222 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7223 IEM_MC_END();
7224 }
7225 else
7226 {
7227 IEM_MC_BEGIN(3, 0);
7228 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7229 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7230 IEM_MC_ARG(uint32_t, u32Enc, 2);
7231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7232 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7233 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7234 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7235 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7236 IEM_MC_END();
7237 }
7238 }
7239 return VINF_SUCCESS;
7240}
7241#else
7242FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7243#endif
7244/* Opcode 0x66 0x0f 0x79 - invalid */
7245/* Opcode 0xf3 0x0f 0x79 - invalid */
7246/* Opcode 0xf2 0x0f 0x79 - invalid */
7247
7248/* Opcode 0x0f 0x7a - invalid */
7249/* Opcode 0x66 0x0f 0x7a - invalid */
7250/* Opcode 0xf3 0x0f 0x7a - invalid */
7251/* Opcode 0xf2 0x0f 0x7a - invalid */
7252
7253/* Opcode 0x0f 0x7b - invalid */
7254/* Opcode 0x66 0x0f 0x7b - invalid */
7255/* Opcode 0xf3 0x0f 0x7b - invalid */
7256/* Opcode 0xf2 0x0f 0x7b - invalid */
7257
7258/* Opcode 0x0f 0x7c - invalid */
7259
7260
7261/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7262FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7263{
7264 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
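    /* Horizontal add: dst[0] = dst[0] + dst[1]; dst[1] = src[0] + src[1] (packed doubles). */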
7265 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7266}
7267
7268
7269/* Opcode 0xf3 0x0f 0x7c - invalid */
7270
7271
7272/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7273FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7274{
7275 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
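    /* Horizontal add: dst[0] = dst[0]+dst[1]; dst[1] = dst[2]+dst[3];
       dst[2] = src[0]+src[1]; dst[3] = src[2]+src[3] (packed singles). */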
7276 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7277}
7278
7279
7280/* Opcode 0x0f 0x7d - invalid */
7281
7282
7283/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7284FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7285{
7286 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
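    /* Horizontal subtract: dst[0] = dst[0] - dst[1]; dst[1] = src[0] - src[1] (packed doubles). */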
7287 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7288}
7289
7290
7291/* Opcode 0xf3 0x0f 0x7d - invalid */
7292
7293
7294/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7295FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7296{
7297 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
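    /* Horizontal subtract: dst[0] = dst[0]-dst[1]; dst[1] = dst[2]-dst[3];
       dst[2] = src[0]-src[1]; dst[3] = src[2]-src[3] (packed singles). */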
7298 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7299}
7300
7301
7302/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7303FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7304{
7305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7306 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7307 {
7308 /**
7309 * @opcode 0x7e
7310 * @opcodesub rex.w=1
7311 * @oppfx none
7312 * @opcpuid mmx
7313 * @opgroup og_mmx_datamove
7314 * @opxcpttype 5
7315 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7316 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7317 */
7318 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7319 if (IEM_IS_MODRM_REG_MODE(bRm))
7320 {
7321 /* greg64, MMX */
7322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7323 IEM_MC_BEGIN(0, 1);
7324 IEM_MC_LOCAL(uint64_t, u64Tmp);
7325
7326 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7327 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7328 IEM_MC_FPU_TO_MMX_MODE();
7329
7330 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7331 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7332
7333 IEM_MC_ADVANCE_RIP();
7334 IEM_MC_END();
7335 }
7336 else
7337 {
7338 /* [mem64], MMX */
7339 IEM_MC_BEGIN(0, 2);
7340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7341 IEM_MC_LOCAL(uint64_t, u64Tmp);
7342
7343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7345 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7346 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7347 IEM_MC_FPU_TO_MMX_MODE();
7348
7349 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7351
7352 IEM_MC_ADVANCE_RIP();
7353 IEM_MC_END();
7354 }
7355 }
7356 else
7357 {
7358 /**
7359 * @opdone
7360 * @opcode 0x7e
7361 * @opcodesub rex.w=0
7362 * @oppfx none
7363 * @opcpuid mmx
7364 * @opgroup og_mmx_datamove
7365 * @opxcpttype 5
7366 * @opfunction iemOp_movd_q_Ey_Pd
7367 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7368 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7369 */
7370 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7371 if (IEM_IS_MODRM_REG_MODE(bRm))
7372 {
7373 /* greg32, MMX */
7374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7375 IEM_MC_BEGIN(0, 1);
7376 IEM_MC_LOCAL(uint32_t, u32Tmp);
7377
7378 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7379 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7380 IEM_MC_FPU_TO_MMX_MODE();
7381
7382 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7383 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7384
7385 IEM_MC_ADVANCE_RIP();
7386 IEM_MC_END();
7387 }
7388 else
7389 {
7390 /* [mem32], MMX */
7391 IEM_MC_BEGIN(0, 2);
7392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7393 IEM_MC_LOCAL(uint32_t, u32Tmp);
7394
7395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7397 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7398 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7399 IEM_MC_FPU_TO_MMX_MODE();
7400
7401 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7402 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7403
7404 IEM_MC_ADVANCE_RIP();
7405 IEM_MC_END();
7406 }
7407 }
7408 return VINF_SUCCESS;
7410}
7411
7412
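/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */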
7413FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7414{
7415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7416 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7417 {
7418 /**
7419 * @opcode 0x7e
7420 * @opcodesub rex.w=1
7421 * @oppfx 0x66
7422 * @opcpuid sse2
7423 * @opgroup og_sse2_simdint_datamove
7424 * @opxcpttype 5
7425 * @optest 64-bit / op1=1 op2=2 -> op1=2
7426 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7427 */
7428 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7429 if (IEM_IS_MODRM_REG_MODE(bRm))
7430 {
7431 /* greg64, XMM */
7432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7433 IEM_MC_BEGIN(0, 1);
7434 IEM_MC_LOCAL(uint64_t, u64Tmp);
7435
7436 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7437 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7438
7439 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7440 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7441
7442 IEM_MC_ADVANCE_RIP();
7443 IEM_MC_END();
7444 }
7445 else
7446 {
7447 /* [mem64], XMM */
7448 IEM_MC_BEGIN(0, 2);
7449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7450 IEM_MC_LOCAL(uint64_t, u64Tmp);
7451
7452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7454 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7456
7457 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7459
7460 IEM_MC_ADVANCE_RIP();
7461 IEM_MC_END();
7462 }
7463 }
7464 else
7465 {
7466 /**
7467 * @opdone
7468 * @opcode 0x7e
7469 * @opcodesub rex.w=0
7470 * @oppfx 0x66
7471 * @opcpuid sse2
7472 * @opgroup og_sse2_simdint_datamove
7473 * @opxcpttype 5
7474 * @opfunction iemOp_movd_q_Ey_Vy
7475 * @optest op1=1 op2=2 -> op1=2
7476 * @optest op1=0 op2=-42 -> op1=-42
7477 */
7478 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7479 if (IEM_IS_MODRM_REG_MODE(bRm))
7480 {
7481 /* greg32, XMM */
7482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7483 IEM_MC_BEGIN(0, 1);
7484 IEM_MC_LOCAL(uint32_t, u32Tmp);
7485
7486 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7488
7489 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7490 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7491
7492 IEM_MC_ADVANCE_RIP();
7493 IEM_MC_END();
7494 }
7495 else
7496 {
7497 /* [mem32], XMM */
7498 IEM_MC_BEGIN(0, 2);
7499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7500 IEM_MC_LOCAL(uint32_t, u32Tmp);
7501
7502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7506
7507 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7508 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7509
7510 IEM_MC_ADVANCE_RIP();
7511 IEM_MC_END();
7512 }
7513 }
7514 return VINF_SUCCESS;
7516}
7517
7518/**
7519 * @opcode 0x7e
7520 * @oppfx 0xf3
7521 * @opcpuid sse2
7522 * @opgroup og_sse2_pcksclr_datamove
7523 * @opxcpttype none
7524 * @optest op1=1 op2=2 -> op1=2
7525 * @optest op1=0 op2=-42 -> op1=-42
7526 */
7527FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7528{
7529 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7531 if (IEM_IS_MODRM_REG_MODE(bRm))
7532 {
7533 /*
7534 * Register, register.
7535 */
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7537 IEM_MC_BEGIN(0, 2);
7538 IEM_MC_LOCAL(uint64_t, uSrc);
7539
7540 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7542
7543 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
7544 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7545
7546 IEM_MC_ADVANCE_RIP();
7547 IEM_MC_END();
7548 }
7549 else
7550 {
7551 /*
7552 * Memory, register.
7553 */
7554 IEM_MC_BEGIN(0, 2);
7555 IEM_MC_LOCAL(uint64_t, uSrc);
7556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7557
7558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7562
7563 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7564 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7565
7566 IEM_MC_ADVANCE_RIP();
7567 IEM_MC_END();
7568 }
7569 return VINF_SUCCESS;
7570}
7571
7572/* Opcode 0xf2 0x0f 0x7e - invalid */
7573
7574
7575/** Opcode 0x0f 0x7f - movq Qq, Pq */
7576FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7577{
7578 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7580 if (IEM_IS_MODRM_REG_MODE(bRm))
7581 {
7582 /*
7583 * Register, register.
7584 */
7585 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7586 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7588 IEM_MC_BEGIN(0, 1);
7589 IEM_MC_LOCAL(uint64_t, u64Tmp);
7590 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7591 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7592 IEM_MC_FPU_TO_MMX_MODE();
7593
7594 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7595 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7596
7597 IEM_MC_ADVANCE_RIP();
7598 IEM_MC_END();
7599 }
7600 else
7601 {
7602 /*
7603 * Memory, register.
7604 */
7605 IEM_MC_BEGIN(0, 2);
7606 IEM_MC_LOCAL(uint64_t, u64Tmp);
7607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7608
7609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7611 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7612 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7613 IEM_MC_FPU_TO_MMX_MODE();
7614
7615 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7616 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7617
7618 IEM_MC_ADVANCE_RIP();
7619 IEM_MC_END();
7620 }
7621 return VINF_SUCCESS;
7622}
7623
7624/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7625FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7626{
7627 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7629 if (IEM_IS_MODRM_REG_MODE(bRm))
7630 {
7631 /*
7632 * Register, register.
7633 */
7634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7635 IEM_MC_BEGIN(0, 0);
7636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7638 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7639 IEM_GET_MODRM_REG(pVCpu, bRm));
7640 IEM_MC_ADVANCE_RIP();
7641 IEM_MC_END();
7642 }
7643 else
7644 {
7645 /*
7646 * Register, memory.
7647 */
7648 IEM_MC_BEGIN(0, 2);
7649 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7651
7652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7656
7657 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7658 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7659
7660 IEM_MC_ADVANCE_RIP();
7661 IEM_MC_END();
7662 }
7663 return VINF_SUCCESS;
7664}
7665
7666/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7667FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7668{
7669 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7671 if (IEM_IS_MODRM_REG_MODE(bRm))
7672 {
7673 /*
7674 * Register, register.
7675 */
7676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7677 IEM_MC_BEGIN(0, 0);
7678 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7680 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7681 IEM_GET_MODRM_REG(pVCpu, bRm));
7682 IEM_MC_ADVANCE_RIP();
7683 IEM_MC_END();
7684 }
7685 else
7686 {
7687 /*
7688 * Register, memory.
7689 */
7690 IEM_MC_BEGIN(0, 2);
7691 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7693
7694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7696 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7697 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7698
7699 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7700 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7701
7702 IEM_MC_ADVANCE_RIP();
7703 IEM_MC_END();
7704 }
7705 return VINF_SUCCESS;
7706}
7707
7708/* Opcode 0xf2 0x0f 0x7f - invalid */
7709
7710
7711
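/*
 * Opcodes 0x0f 0x80 thru 0x0f 0x8f - Jcc Jv: near conditional jumps taking a
 * 16-bit or 32-bit displacement.  The low opcode nibble encodes the condition,
 * mirroring the short 0x70..0x7f forms: O, NO, C/B, NC/AE, E/Z, NE/NZ, BE/NA,
 * NBE/A, S, NS, P, NP, L (SF != OF), NL/GE, LE (ZF or SF != OF) and NLE/G.
 */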
7712/** Opcode 0x0f 0x80. */
7713FNIEMOP_DEF(iemOp_jo_Jv)
7714{
7715 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7716 IEMOP_HLP_MIN_386();
7717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7718 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7719 {
7720 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7722
7723 IEM_MC_BEGIN(0, 0);
7724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7725 IEM_MC_REL_JMP_S16(i16Imm);
7726 } IEM_MC_ELSE() {
7727 IEM_MC_ADVANCE_RIP();
7728 } IEM_MC_ENDIF();
7729 IEM_MC_END();
7730 }
7731 else
7732 {
7733 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7735
7736 IEM_MC_BEGIN(0, 0);
7737 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7738 IEM_MC_REL_JMP_S32(i32Imm);
7739 } IEM_MC_ELSE() {
7740 IEM_MC_ADVANCE_RIP();
7741 } IEM_MC_ENDIF();
7742 IEM_MC_END();
7743 }
7744 return VINF_SUCCESS;
7745}
7746
7747
7748/** Opcode 0x0f 0x81. */
7749FNIEMOP_DEF(iemOp_jno_Jv)
7750{
7751 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7752 IEMOP_HLP_MIN_386();
7753 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7754 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7755 {
7756 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7758
7759 IEM_MC_BEGIN(0, 0);
7760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7761 IEM_MC_ADVANCE_RIP();
7762 } IEM_MC_ELSE() {
7763 IEM_MC_REL_JMP_S16(i16Imm);
7764 } IEM_MC_ENDIF();
7765 IEM_MC_END();
7766 }
7767 else
7768 {
7769 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771
7772 IEM_MC_BEGIN(0, 0);
7773 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7774 IEM_MC_ADVANCE_RIP();
7775 } IEM_MC_ELSE() {
7776 IEM_MC_REL_JMP_S32(i32Imm);
7777 } IEM_MC_ENDIF();
7778 IEM_MC_END();
7779 }
7780 return VINF_SUCCESS;
7781}
7782
7783
7784/** Opcode 0x0f 0x82. */
7785FNIEMOP_DEF(iemOp_jc_Jv)
7786{
7787 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7788 IEMOP_HLP_MIN_386();
7789 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7790 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7791 {
7792 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7794
7795 IEM_MC_BEGIN(0, 0);
7796 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7797 IEM_MC_REL_JMP_S16(i16Imm);
7798 } IEM_MC_ELSE() {
7799 IEM_MC_ADVANCE_RIP();
7800 } IEM_MC_ENDIF();
7801 IEM_MC_END();
7802 }
7803 else
7804 {
7805 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7807
7808 IEM_MC_BEGIN(0, 0);
7809 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7810 IEM_MC_REL_JMP_S32(i32Imm);
7811 } IEM_MC_ELSE() {
7812 IEM_MC_ADVANCE_RIP();
7813 } IEM_MC_ENDIF();
7814 IEM_MC_END();
7815 }
7816 return VINF_SUCCESS;
7817}
7818
7819
7820/** Opcode 0x0f 0x83. */
7821FNIEMOP_DEF(iemOp_jnc_Jv)
7822{
7823 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7824 IEMOP_HLP_MIN_386();
7825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7826 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7827 {
7828 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7830
7831 IEM_MC_BEGIN(0, 0);
7832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7833 IEM_MC_ADVANCE_RIP();
7834 } IEM_MC_ELSE() {
7835 IEM_MC_REL_JMP_S16(i16Imm);
7836 } IEM_MC_ENDIF();
7837 IEM_MC_END();
7838 }
7839 else
7840 {
7841 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843
7844 IEM_MC_BEGIN(0, 0);
7845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7846 IEM_MC_ADVANCE_RIP();
7847 } IEM_MC_ELSE() {
7848 IEM_MC_REL_JMP_S32(i32Imm);
7849 } IEM_MC_ENDIF();
7850 IEM_MC_END();
7851 }
7852 return VINF_SUCCESS;
7853}
7854
7855
7856/** Opcode 0x0f 0x84. */
7857FNIEMOP_DEF(iemOp_je_Jv)
7858{
7859 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7860 IEMOP_HLP_MIN_386();
7861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7862 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7863 {
7864 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7866
7867 IEM_MC_BEGIN(0, 0);
7868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7869 IEM_MC_REL_JMP_S16(i16Imm);
7870 } IEM_MC_ELSE() {
7871 IEM_MC_ADVANCE_RIP();
7872 } IEM_MC_ENDIF();
7873 IEM_MC_END();
7874 }
7875 else
7876 {
7877 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7879
7880 IEM_MC_BEGIN(0, 0);
7881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7882 IEM_MC_REL_JMP_S32(i32Imm);
7883 } IEM_MC_ELSE() {
7884 IEM_MC_ADVANCE_RIP();
7885 } IEM_MC_ENDIF();
7886 IEM_MC_END();
7887 }
7888 return VINF_SUCCESS;
7889}
7890
7891
7892/** Opcode 0x0f 0x85. */
7893FNIEMOP_DEF(iemOp_jne_Jv)
7894{
7895 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7896 IEMOP_HLP_MIN_386();
7897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7899 {
7900 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7902
7903 IEM_MC_BEGIN(0, 0);
7904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7905 IEM_MC_ADVANCE_RIP();
7906 } IEM_MC_ELSE() {
7907 IEM_MC_REL_JMP_S16(i16Imm);
7908 } IEM_MC_ENDIF();
7909 IEM_MC_END();
7910 }
7911 else
7912 {
7913 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7915
7916 IEM_MC_BEGIN(0, 0);
7917 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7918 IEM_MC_ADVANCE_RIP();
7919 } IEM_MC_ELSE() {
7920 IEM_MC_REL_JMP_S32(i32Imm);
7921 } IEM_MC_ENDIF();
7922 IEM_MC_END();
7923 }
7924 return VINF_SUCCESS;
7925}
7926
7927
7928/** Opcode 0x0f 0x86. */
7929FNIEMOP_DEF(iemOp_jbe_Jv)
7930{
7931 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7932 IEMOP_HLP_MIN_386();
7933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7934 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7935 {
7936 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7938
7939 IEM_MC_BEGIN(0, 0);
7940 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7941 IEM_MC_REL_JMP_S16(i16Imm);
7942 } IEM_MC_ELSE() {
7943 IEM_MC_ADVANCE_RIP();
7944 } IEM_MC_ENDIF();
7945 IEM_MC_END();
7946 }
7947 else
7948 {
7949 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7951
7952 IEM_MC_BEGIN(0, 0);
7953 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7954 IEM_MC_REL_JMP_S32(i32Imm);
7955 } IEM_MC_ELSE() {
7956 IEM_MC_ADVANCE_RIP();
7957 } IEM_MC_ENDIF();
7958 IEM_MC_END();
7959 }
7960 return VINF_SUCCESS;
7961}
7962
7963
7964/** Opcode 0x0f 0x87. */
7965FNIEMOP_DEF(iemOp_jnbe_Jv)
7966{
7967 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7968 IEMOP_HLP_MIN_386();
7969 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7970 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7971 {
7972 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7974
7975 IEM_MC_BEGIN(0, 0);
7976 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7977 IEM_MC_ADVANCE_RIP();
7978 } IEM_MC_ELSE() {
7979 IEM_MC_REL_JMP_S16(i16Imm);
7980 } IEM_MC_ENDIF();
7981 IEM_MC_END();
7982 }
7983 else
7984 {
7985 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7987
7988 IEM_MC_BEGIN(0, 0);
7989 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7990 IEM_MC_ADVANCE_RIP();
7991 } IEM_MC_ELSE() {
7992 IEM_MC_REL_JMP_S32(i32Imm);
7993 } IEM_MC_ENDIF();
7994 IEM_MC_END();
7995 }
7996 return VINF_SUCCESS;
7997}
7998
7999
8000/** Opcode 0x0f 0x88. */
8001FNIEMOP_DEF(iemOp_js_Jv)
8002{
8003 IEMOP_MNEMONIC(js_Jv, "js Jv");
8004 IEMOP_HLP_MIN_386();
8005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8007 {
8008 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010
8011 IEM_MC_BEGIN(0, 0);
8012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8013 IEM_MC_REL_JMP_S16(i16Imm);
8014 } IEM_MC_ELSE() {
8015 IEM_MC_ADVANCE_RIP();
8016 } IEM_MC_ENDIF();
8017 IEM_MC_END();
8018 }
8019 else
8020 {
8021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8023
8024 IEM_MC_BEGIN(0, 0);
8025 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8026 IEM_MC_REL_JMP_S32(i32Imm);
8027 } IEM_MC_ELSE() {
8028 IEM_MC_ADVANCE_RIP();
8029 } IEM_MC_ENDIF();
8030 IEM_MC_END();
8031 }
8032 return VINF_SUCCESS;
8033}
8034
8035
8036/** Opcode 0x0f 0x89. */
8037FNIEMOP_DEF(iemOp_jns_Jv)
8038{
8039 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8040 IEMOP_HLP_MIN_386();
8041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8043 {
8044 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8046
8047 IEM_MC_BEGIN(0, 0);
8048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8049 IEM_MC_ADVANCE_RIP();
8050 } IEM_MC_ELSE() {
8051 IEM_MC_REL_JMP_S16(i16Imm);
8052 } IEM_MC_ENDIF();
8053 IEM_MC_END();
8054 }
8055 else
8056 {
8057 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059
8060 IEM_MC_BEGIN(0, 0);
8061 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8062 IEM_MC_ADVANCE_RIP();
8063 } IEM_MC_ELSE() {
8064 IEM_MC_REL_JMP_S32(i32Imm);
8065 } IEM_MC_ENDIF();
8066 IEM_MC_END();
8067 }
8068 return VINF_SUCCESS;
8069}
8070
8071
8072/** Opcode 0x0f 0x8a. */
8073FNIEMOP_DEF(iemOp_jp_Jv)
8074{
8075 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8076 IEMOP_HLP_MIN_386();
8077 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8078 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8079 {
8080 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8082
8083 IEM_MC_BEGIN(0, 0);
8084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8085 IEM_MC_REL_JMP_S16(i16Imm);
8086 } IEM_MC_ELSE() {
8087 IEM_MC_ADVANCE_RIP();
8088 } IEM_MC_ENDIF();
8089 IEM_MC_END();
8090 }
8091 else
8092 {
8093 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8095
8096 IEM_MC_BEGIN(0, 0);
8097 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8098 IEM_MC_REL_JMP_S32(i32Imm);
8099 } IEM_MC_ELSE() {
8100 IEM_MC_ADVANCE_RIP();
8101 } IEM_MC_ENDIF();
8102 IEM_MC_END();
8103 }
8104 return VINF_SUCCESS;
8105}
8106
8107
8108/** Opcode 0x0f 0x8b. */
8109FNIEMOP_DEF(iemOp_jnp_Jv)
8110{
8111 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8112 IEMOP_HLP_MIN_386();
8113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8115 {
8116 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8118
8119 IEM_MC_BEGIN(0, 0);
8120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8121 IEM_MC_ADVANCE_RIP();
8122 } IEM_MC_ELSE() {
8123 IEM_MC_REL_JMP_S16(i16Imm);
8124 } IEM_MC_ENDIF();
8125 IEM_MC_END();
8126 }
8127 else
8128 {
8129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8131
8132 IEM_MC_BEGIN(0, 0);
8133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8134 IEM_MC_ADVANCE_RIP();
8135 } IEM_MC_ELSE() {
8136 IEM_MC_REL_JMP_S32(i32Imm);
8137 } IEM_MC_ENDIF();
8138 IEM_MC_END();
8139 }
8140 return VINF_SUCCESS;
8141}
8142
8143
8144/** Opcode 0x0f 0x8c. */
8145FNIEMOP_DEF(iemOp_jl_Jv)
8146{
8147 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8148 IEMOP_HLP_MIN_386();
8149 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8150 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8151 {
8152 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8154
8155 IEM_MC_BEGIN(0, 0);
8156 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8157 IEM_MC_REL_JMP_S16(i16Imm);
8158 } IEM_MC_ELSE() {
8159 IEM_MC_ADVANCE_RIP();
8160 } IEM_MC_ENDIF();
8161 IEM_MC_END();
8162 }
8163 else
8164 {
8165 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8167
8168 IEM_MC_BEGIN(0, 0);
8169 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8170 IEM_MC_REL_JMP_S32(i32Imm);
8171 } IEM_MC_ELSE() {
8172 IEM_MC_ADVANCE_RIP();
8173 } IEM_MC_ENDIF();
8174 IEM_MC_END();
8175 }
8176 return VINF_SUCCESS;
8177}
8178
8179
8180/** Opcode 0x0f 0x8d. */
8181FNIEMOP_DEF(iemOp_jnl_Jv)
8182{
8183 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8184 IEMOP_HLP_MIN_386();
8185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8186 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8187 {
8188 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8190
8191 IEM_MC_BEGIN(0, 0);
8192 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8193 IEM_MC_ADVANCE_RIP();
8194 } IEM_MC_ELSE() {
8195 IEM_MC_REL_JMP_S16(i16Imm);
8196 } IEM_MC_ENDIF();
8197 IEM_MC_END();
8198 }
8199 else
8200 {
8201 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8203
8204 IEM_MC_BEGIN(0, 0);
8205 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8206 IEM_MC_ADVANCE_RIP();
8207 } IEM_MC_ELSE() {
8208 IEM_MC_REL_JMP_S32(i32Imm);
8209 } IEM_MC_ENDIF();
8210 IEM_MC_END();
8211 }
8212 return VINF_SUCCESS;
8213}
8214
8215
8216/** Opcode 0x0f 0x8e. */
8217FNIEMOP_DEF(iemOp_jle_Jv)
8218{
8219 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8220 IEMOP_HLP_MIN_386();
8221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8223 {
8224 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8226
8227 IEM_MC_BEGIN(0, 0);
8228 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8229 IEM_MC_REL_JMP_S16(i16Imm);
8230 } IEM_MC_ELSE() {
8231 IEM_MC_ADVANCE_RIP();
8232 } IEM_MC_ENDIF();
8233 IEM_MC_END();
8234 }
8235 else
8236 {
8237 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8239
8240 IEM_MC_BEGIN(0, 0);
8241 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8242 IEM_MC_REL_JMP_S32(i32Imm);
8243 } IEM_MC_ELSE() {
8244 IEM_MC_ADVANCE_RIP();
8245 } IEM_MC_ENDIF();
8246 IEM_MC_END();
8247 }
8248 return VINF_SUCCESS;
8249}
8250
8251
8252/** Opcode 0x0f 0x8f. */
8253FNIEMOP_DEF(iemOp_jnle_Jv)
8254{
8255 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8256 IEMOP_HLP_MIN_386();
8257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8258 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8259 {
8260 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8262
8263 IEM_MC_BEGIN(0, 0);
8264 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8265 IEM_MC_ADVANCE_RIP();
8266 } IEM_MC_ELSE() {
8267 IEM_MC_REL_JMP_S16(i16Imm);
8268 } IEM_MC_ENDIF();
8269 IEM_MC_END();
8270 }
8271 else
8272 {
8273 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8275
8276 IEM_MC_BEGIN(0, 0);
8277 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8278 IEM_MC_ADVANCE_RIP();
8279 } IEM_MC_ELSE() {
8280 IEM_MC_REL_JMP_S32(i32Imm);
8281 } IEM_MC_ENDIF();
8282 IEM_MC_END();
8283 }
8284 return VINF_SUCCESS;
8285}
8286
8287
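/*
 * Opcodes 0x0f 0x90 thru 0x0f 0x9f - SETcc Eb: store 1 in the byte operand if
 * the condition holds and 0 if it doesn't.  The low opcode nibble encodes the
 * same conditions as the Jcc opcodes above.
 */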
8288/** Opcode 0x0f 0x90. */
8289FNIEMOP_DEF(iemOp_seto_Eb)
8290{
8291 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8292 IEMOP_HLP_MIN_386();
8293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8294
8295 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8296 * any way. AMD says it's "unused", whatever that means. We're
8297 * ignoring for now. */
8298 if (IEM_IS_MODRM_REG_MODE(bRm))
8299 {
8300 /* register target */
8301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8302 IEM_MC_BEGIN(0, 0);
8303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8304 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8305 } IEM_MC_ELSE() {
8306 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8307 } IEM_MC_ENDIF();
8308 IEM_MC_ADVANCE_RIP();
8309 IEM_MC_END();
8310 }
8311 else
8312 {
8313 /* memory target */
8314 IEM_MC_BEGIN(0, 1);
8315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8319 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8320 } IEM_MC_ELSE() {
8321 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8322 } IEM_MC_ENDIF();
8323 IEM_MC_ADVANCE_RIP();
8324 IEM_MC_END();
8325 }
8326 return VINF_SUCCESS;
8327}
8328
8329
8330/** Opcode 0x0f 0x91. */
8331FNIEMOP_DEF(iemOp_setno_Eb)
8332{
8333 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8334 IEMOP_HLP_MIN_386();
8335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8336
8337 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8338 * any way. AMD says it's "unused", whatever that means. We're
8339 * ignoring for now. */
8340 if (IEM_IS_MODRM_REG_MODE(bRm))
8341 {
8342 /* register target */
8343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8344 IEM_MC_BEGIN(0, 0);
8345 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8346 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8347 } IEM_MC_ELSE() {
8348 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8349 } IEM_MC_ENDIF();
8350 IEM_MC_ADVANCE_RIP();
8351 IEM_MC_END();
8352 }
8353 else
8354 {
8355 /* memory target */
8356 IEM_MC_BEGIN(0, 1);
8357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8360 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8361 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8362 } IEM_MC_ELSE() {
8363 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8364 } IEM_MC_ENDIF();
8365 IEM_MC_ADVANCE_RIP();
8366 IEM_MC_END();
8367 }
8368 return VINF_SUCCESS;
8369}
8370
8371
8372/** Opcode 0x0f 0x92. */
8373FNIEMOP_DEF(iemOp_setc_Eb)
8374{
8375 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8376 IEMOP_HLP_MIN_386();
8377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8378
8379 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8380 * any way. AMD says it's "unused", whatever that means. We're
8381 * ignoring for now. */
8382 if (IEM_IS_MODRM_REG_MODE(bRm))
8383 {
8384 /* register target */
8385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8386 IEM_MC_BEGIN(0, 0);
8387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8388 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8389 } IEM_MC_ELSE() {
8390 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8391 } IEM_MC_ENDIF();
8392 IEM_MC_ADVANCE_RIP();
8393 IEM_MC_END();
8394 }
8395 else
8396 {
8397 /* memory target */
8398 IEM_MC_BEGIN(0, 1);
8399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8402 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8403 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8404 } IEM_MC_ELSE() {
8405 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8406 } IEM_MC_ENDIF();
8407 IEM_MC_ADVANCE_RIP();
8408 IEM_MC_END();
8409 }
8410 return VINF_SUCCESS;
8411}
8412
8413
8414/** Opcode 0x0f 0x93. */
8415FNIEMOP_DEF(iemOp_setnc_Eb)
8416{
8417 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8418 IEMOP_HLP_MIN_386();
8419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8420
8421 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8422 * any way. AMD says it's "unused", whatever that means. We're
8423 * ignoring for now. */
8424 if (IEM_IS_MODRM_REG_MODE(bRm))
8425 {
8426 /* register target */
8427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8428 IEM_MC_BEGIN(0, 0);
8429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8430 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8431 } IEM_MC_ELSE() {
8432 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8433 } IEM_MC_ENDIF();
8434 IEM_MC_ADVANCE_RIP();
8435 IEM_MC_END();
8436 }
8437 else
8438 {
8439 /* memory target */
8440 IEM_MC_BEGIN(0, 1);
8441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8445 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8446 } IEM_MC_ELSE() {
8447 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8448 } IEM_MC_ENDIF();
8449 IEM_MC_ADVANCE_RIP();
8450 IEM_MC_END();
8451 }
8452 return VINF_SUCCESS;
8453}
8454
8455
8456/** Opcode 0x0f 0x94. */
8457FNIEMOP_DEF(iemOp_sete_Eb)
8458{
8459 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8460 IEMOP_HLP_MIN_386();
8461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8462
8463 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8464 * any way. AMD says it's "unused", whatever that means. We're
8465 * ignoring for now. */
8466 if (IEM_IS_MODRM_REG_MODE(bRm))
8467 {
8468 /* register target */
8469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8470 IEM_MC_BEGIN(0, 0);
8471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8472 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8473 } IEM_MC_ELSE() {
8474 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8475 } IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP();
8477 IEM_MC_END();
8478 }
8479 else
8480 {
8481 /* memory target */
8482 IEM_MC_BEGIN(0, 1);
8483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8488 } IEM_MC_ELSE() {
8489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8490 } IEM_MC_ENDIF();
8491 IEM_MC_ADVANCE_RIP();
8492 IEM_MC_END();
8493 }
8494 return VINF_SUCCESS;
8495}
8496
8497
8498/** Opcode 0x0f 0x95. */
8499FNIEMOP_DEF(iemOp_setne_Eb)
8500{
8501 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8502 IEMOP_HLP_MIN_386();
8503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8504
8505 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8506 * any way. AMD says it's "unused", whatever that means. We're
8507 * ignoring for now. */
8508 if (IEM_IS_MODRM_REG_MODE(bRm))
8509 {
8510 /* register target */
8511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8512 IEM_MC_BEGIN(0, 0);
8513 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8514 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8515 } IEM_MC_ELSE() {
8516 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8517 } IEM_MC_ENDIF();
8518 IEM_MC_ADVANCE_RIP();
8519 IEM_MC_END();
8520 }
8521 else
8522 {
8523 /* memory target */
8524 IEM_MC_BEGIN(0, 1);
8525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8528 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8530 } IEM_MC_ELSE() {
8531 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8532 } IEM_MC_ENDIF();
8533 IEM_MC_ADVANCE_RIP();
8534 IEM_MC_END();
8535 }
8536 return VINF_SUCCESS;
8537}
8538
8539
8540/** Opcode 0x0f 0x96. */
8541FNIEMOP_DEF(iemOp_setbe_Eb)
8542{
8543 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8544 IEMOP_HLP_MIN_386();
8545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8546
8547 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8548 * any way. AMD says it's "unused", whatever that means. We're
8549 * ignoring for now. */
8550 if (IEM_IS_MODRM_REG_MODE(bRm))
8551 {
8552 /* register target */
8553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8554 IEM_MC_BEGIN(0, 0);
8555 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8556 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8557 } IEM_MC_ELSE() {
8558 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8559 } IEM_MC_ENDIF();
8560 IEM_MC_ADVANCE_RIP();
8561 IEM_MC_END();
8562 }
8563 else
8564 {
8565 /* memory target */
8566 IEM_MC_BEGIN(0, 1);
8567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8570 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8572 } IEM_MC_ELSE() {
8573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8574 } IEM_MC_ENDIF();
8575 IEM_MC_ADVANCE_RIP();
8576 IEM_MC_END();
8577 }
8578 return VINF_SUCCESS;
8579}
8580
8581
8582/** Opcode 0x0f 0x97. */
8583FNIEMOP_DEF(iemOp_setnbe_Eb)
8584{
8585 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8586 IEMOP_HLP_MIN_386();
8587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8588
8589 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8590 * any way. AMD says it's "unused", whatever that means. We're
8591 * ignoring for now. */
8592 if (IEM_IS_MODRM_REG_MODE(bRm))
8593 {
8594 /* register target */
8595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8596 IEM_MC_BEGIN(0, 0);
8597 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8598 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8599 } IEM_MC_ELSE() {
8600 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8601 } IEM_MC_ENDIF();
8602 IEM_MC_ADVANCE_RIP();
8603 IEM_MC_END();
8604 }
8605 else
8606 {
8607 /* memory target */
8608 IEM_MC_BEGIN(0, 1);
8609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8612 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8614 } IEM_MC_ELSE() {
8615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8616 } IEM_MC_ENDIF();
8617 IEM_MC_ADVANCE_RIP();
8618 IEM_MC_END();
8619 }
8620 return VINF_SUCCESS;
8621}
8622
8623
8624/** Opcode 0x0f 0x98. */
8625FNIEMOP_DEF(iemOp_sets_Eb)
8626{
8627 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8628 IEMOP_HLP_MIN_386();
8629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8630
8631 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8632 * any way. AMD says it's "unused", whatever that means. We're
8633 * ignoring for now. */
8634 if (IEM_IS_MODRM_REG_MODE(bRm))
8635 {
8636 /* register target */
8637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8638 IEM_MC_BEGIN(0, 0);
8639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8640 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8641 } IEM_MC_ELSE() {
8642 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8643 } IEM_MC_ENDIF();
8644 IEM_MC_ADVANCE_RIP();
8645 IEM_MC_END();
8646 }
8647 else
8648 {
8649 /* memory target */
8650 IEM_MC_BEGIN(0, 1);
8651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8656 } IEM_MC_ELSE() {
8657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8658 } IEM_MC_ENDIF();
8659 IEM_MC_ADVANCE_RIP();
8660 IEM_MC_END();
8661 }
8662 return VINF_SUCCESS;
8663}
8664
8665
8666/** Opcode 0x0f 0x99. */
8667FNIEMOP_DEF(iemOp_setns_Eb)
8668{
8669 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8670 IEMOP_HLP_MIN_386();
8671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8672
8673 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8674 * any way. AMD says it's "unused", whatever that means. We're
8675 * ignoring for now. */
8676 if (IEM_IS_MODRM_REG_MODE(bRm))
8677 {
8678 /* register target */
8679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8680 IEM_MC_BEGIN(0, 0);
8681 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8682 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8683 } IEM_MC_ELSE() {
8684 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8685 } IEM_MC_ENDIF();
8686 IEM_MC_ADVANCE_RIP();
8687 IEM_MC_END();
8688 }
8689 else
8690 {
8691 /* memory target */
8692 IEM_MC_BEGIN(0, 1);
8693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8698 } IEM_MC_ELSE() {
8699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8700 } IEM_MC_ENDIF();
8701 IEM_MC_ADVANCE_RIP();
8702 IEM_MC_END();
8703 }
8704 return VINF_SUCCESS;
8705}
8706
8707
8708/** Opcode 0x0f 0x9a. */
8709FNIEMOP_DEF(iemOp_setp_Eb)
8710{
8711 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8712 IEMOP_HLP_MIN_386();
8713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8714
8715 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8716 * any way. AMD says it's "unused", whatever that means. We're
8717 * ignoring for now. */
8718 if (IEM_IS_MODRM_REG_MODE(bRm))
8719 {
8720 /* register target */
8721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8722 IEM_MC_BEGIN(0, 0);
8723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8724 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8725 } IEM_MC_ELSE() {
8726 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8727 } IEM_MC_ENDIF();
8728 IEM_MC_ADVANCE_RIP();
8729 IEM_MC_END();
8730 }
8731 else
8732 {
8733 /* memory target */
8734 IEM_MC_BEGIN(0, 1);
8735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8738 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8740 } IEM_MC_ELSE() {
8741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8742 } IEM_MC_ENDIF();
8743 IEM_MC_ADVANCE_RIP();
8744 IEM_MC_END();
8745 }
8746 return VINF_SUCCESS;
8747}
8748
8749
8750/** Opcode 0x0f 0x9b. */
8751FNIEMOP_DEF(iemOp_setnp_Eb)
8752{
8753 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8754 IEMOP_HLP_MIN_386();
8755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8756
8757 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8758 * any way. AMD says it's "unused", whatever that means. We're
8759 * ignoring for now. */
8760 if (IEM_IS_MODRM_REG_MODE(bRm))
8761 {
8762 /* register target */
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 IEM_MC_BEGIN(0, 0);
8765 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8766 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8767 } IEM_MC_ELSE() {
8768 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8769 } IEM_MC_ENDIF();
8770 IEM_MC_ADVANCE_RIP();
8771 IEM_MC_END();
8772 }
8773 else
8774 {
8775 /* memory target */
8776 IEM_MC_BEGIN(0, 1);
8777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8780 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8782 } IEM_MC_ELSE() {
8783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8784 } IEM_MC_ENDIF();
8785 IEM_MC_ADVANCE_RIP();
8786 IEM_MC_END();
8787 }
8788 return VINF_SUCCESS;
8789}
8790
8791
8792/** Opcode 0x0f 0x9c. */
8793FNIEMOP_DEF(iemOp_setl_Eb)
8794{
8795 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8796 IEMOP_HLP_MIN_386();
8797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8798
8799 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8800 * any way. AMD says it's "unused", whatever that means. We're
8801 * ignoring for now. */
8802 if (IEM_IS_MODRM_REG_MODE(bRm))
8803 {
8804 /* register target */
8805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8806 IEM_MC_BEGIN(0, 0);
8807 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8808 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8809 } IEM_MC_ELSE() {
8810 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8811 } IEM_MC_ENDIF();
8812 IEM_MC_ADVANCE_RIP();
8813 IEM_MC_END();
8814 }
8815 else
8816 {
8817 /* memory target */
8818 IEM_MC_BEGIN(0, 1);
8819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8822 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8824 } IEM_MC_ELSE() {
8825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8826 } IEM_MC_ENDIF();
8827 IEM_MC_ADVANCE_RIP();
8828 IEM_MC_END();
8829 }
8830 return VINF_SUCCESS;
8831}
8832
8833
8834/** Opcode 0x0f 0x9d. */
8835FNIEMOP_DEF(iemOp_setnl_Eb)
8836{
8837 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8838 IEMOP_HLP_MIN_386();
8839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8840
8841 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8842 * any way. AMD says it's "unused", whatever that means. We're
8843 * ignoring for now. */
8844 if (IEM_IS_MODRM_REG_MODE(bRm))
8845 {
8846 /* register target */
8847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8848 IEM_MC_BEGIN(0, 0);
8849 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8850 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8851 } IEM_MC_ELSE() {
8852 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8853 } IEM_MC_ENDIF();
8854 IEM_MC_ADVANCE_RIP();
8855 IEM_MC_END();
8856 }
8857 else
8858 {
8859 /* memory target */
8860 IEM_MC_BEGIN(0, 1);
8861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8864 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8865 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8866 } IEM_MC_ELSE() {
8867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8868 } IEM_MC_ENDIF();
8869 IEM_MC_ADVANCE_RIP();
8870 IEM_MC_END();
8871 }
8872 return VINF_SUCCESS;
8873}
8874
8875
8876/** Opcode 0x0f 0x9e. */
8877FNIEMOP_DEF(iemOp_setle_Eb)
8878{
8879 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8880 IEMOP_HLP_MIN_386();
8881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8882
8883 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8884 * any way. AMD says it's "unused", whatever that means. We're
8885 * ignoring for now. */
8886 if (IEM_IS_MODRM_REG_MODE(bRm))
8887 {
8888 /* register target */
8889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8890 IEM_MC_BEGIN(0, 0);
8891 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8892 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8893 } IEM_MC_ELSE() {
8894 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8895 } IEM_MC_ENDIF();
8896 IEM_MC_ADVANCE_RIP();
8897 IEM_MC_END();
8898 }
8899 else
8900 {
8901 /* memory target */
8902 IEM_MC_BEGIN(0, 1);
8903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8906 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8907 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8908 } IEM_MC_ELSE() {
8909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8910 } IEM_MC_ENDIF();
8911 IEM_MC_ADVANCE_RIP();
8912 IEM_MC_END();
8913 }
8914 return VINF_SUCCESS;
8915}
8916
8917
8918/** Opcode 0x0f 0x9f. */
8919FNIEMOP_DEF(iemOp_setnle_Eb)
8920{
8921 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8922 IEMOP_HLP_MIN_386();
8923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8924
8925 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8926 * any way. AMD says it's "unused", whatever that means. We're
8927 * ignoring for now. */
8928 if (IEM_IS_MODRM_REG_MODE(bRm))
8929 {
8930 /* register target */
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8932 IEM_MC_BEGIN(0, 0);
8933 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8934 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8935 } IEM_MC_ELSE() {
8936 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8937 } IEM_MC_ENDIF();
8938 IEM_MC_ADVANCE_RIP();
8939 IEM_MC_END();
8940 }
8941 else
8942 {
8943 /* memory target */
8944 IEM_MC_BEGIN(0, 1);
8945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8948 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8949 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8950 } IEM_MC_ELSE() {
8951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8952 } IEM_MC_ENDIF();
8953 IEM_MC_ADVANCE_RIP();
8954 IEM_MC_END();
8955 }
8956 return VINF_SUCCESS;
8957}
8958
8959
8960/**
8961 * Common 'push segment-register' helper.
8962 */
8963FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
8964{
8965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8966 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS pushes are valid in 64-bit mode. */
8967 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8968
8969 switch (pVCpu->iem.s.enmEffOpSize)
8970 {
8971 case IEMMODE_16BIT:
8972 IEM_MC_BEGIN(0, 1);
8973 IEM_MC_LOCAL(uint16_t, u16Value);
8974 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
8975 IEM_MC_PUSH_U16(u16Value);
8976 IEM_MC_ADVANCE_RIP();
8977 IEM_MC_END();
8978 break;
8979
8980 case IEMMODE_32BIT:
8981 IEM_MC_BEGIN(0, 1);
8982 IEM_MC_LOCAL(uint32_t, u32Value);
8983 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
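            /* Note! A dedicated push MC is used here since real CPUs, at least
                     the more recent ones, only write the low 16 bits of the
                     stack slot when pushing a segment register with a 32-bit
                     operand size. */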
8984 IEM_MC_PUSH_U32_SREG(u32Value);
8985 IEM_MC_ADVANCE_RIP();
8986 IEM_MC_END();
8987 break;
8988
8989 case IEMMODE_64BIT:
8990 IEM_MC_BEGIN(0, 1);
8991 IEM_MC_LOCAL(uint64_t, u64Value);
8992 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
8993 IEM_MC_PUSH_U64(u64Value);
8994 IEM_MC_ADVANCE_RIP();
8995 IEM_MC_END();
8996 break;
8997 }
8998
8999 return VINF_SUCCESS;
9000}
9001
9002
9003/** Opcode 0x0f 0xa0. */
9004FNIEMOP_DEF(iemOp_push_fs)
9005{
9006 IEMOP_MNEMONIC(push_fs, "push fs");
9007 IEMOP_HLP_MIN_386();
9008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9009 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
9010}
9011
9012
9013/** Opcode 0x0f 0xa1. */
9014FNIEMOP_DEF(iemOp_pop_fs)
9015{
9016 IEMOP_MNEMONIC(pop_fs, "pop fs");
9017 IEMOP_HLP_MIN_386();
9018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9019 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
9020}
9021
9022
9023/** Opcode 0x0f 0xa2. */
9024FNIEMOP_DEF(iemOp_cpuid)
9025{
9026 IEMOP_MNEMONIC(cpuid, "cpuid");
9027 IEMOP_HLP_MIN_486(); /* not all 486es. */
9028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9029 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
9030}
9031
9032
9033/**
9034 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9035 * iemOp_bts_Ev_Gv.
9036 */
9037FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
9038{
9039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9041
9042 if (IEM_IS_MODRM_REG_MODE(bRm))
9043 {
9044 /* register destination. */
9045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9046 switch (pVCpu->iem.s.enmEffOpSize)
9047 {
9048 case IEMMODE_16BIT:
9049 IEM_MC_BEGIN(3, 0);
9050 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9051 IEM_MC_ARG(uint16_t, u16Src, 1);
9052 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9053
9054 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9055 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
9056 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9057 IEM_MC_REF_EFLAGS(pEFlags);
9058 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9059
9060 IEM_MC_ADVANCE_RIP();
9061 IEM_MC_END();
9062 return VINF_SUCCESS;
9063
9064 case IEMMODE_32BIT:
9065 IEM_MC_BEGIN(3, 0);
9066 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9067 IEM_MC_ARG(uint32_t, u32Src, 1);
9068 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9069
9070 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9071 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
9072 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9073 IEM_MC_REF_EFLAGS(pEFlags);
9074 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9075
9076 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9077 IEM_MC_ADVANCE_RIP();
9078 IEM_MC_END();
9079 return VINF_SUCCESS;
9080
9081 case IEMMODE_64BIT:
9082 IEM_MC_BEGIN(3, 0);
9083 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9084 IEM_MC_ARG(uint64_t, u64Src, 1);
9085 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9086
9087 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9088 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
9089 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9090 IEM_MC_REF_EFLAGS(pEFlags);
9091 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9092
9093 IEM_MC_ADVANCE_RIP();
9094 IEM_MC_END();
9095 return VINF_SUCCESS;
9096
9097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9098 }
9099 }
9100 else
9101 {
9102 /* memory destination. */
9103
9104 uint32_t fAccess;
9105 if (pImpl->pfnLockedU16)
9106 fAccess = IEM_ACCESS_DATA_RW;
9107 else /* BT */
9108 fAccess = IEM_ACCESS_DATA_R;
9109
9110 /** @todo test negative bit offsets! */
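        /* For memory destinations the bit offset in Gv is a signed index that
           may address outside the ModRM operand.  It is split into a signed
           unit index and a bit number within the unit, e.g. for the 16-bit
           case below:
               GCPtrEffDst += (i16Offset >> 4) * 2;  bit = offset & 15;
           with the low 5/6 bits used analogously for 32/64-bit operands. */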
9111 switch (pVCpu->iem.s.enmEffOpSize)
9112 {
9113 case IEMMODE_16BIT:
9114 IEM_MC_BEGIN(3, 2);
9115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9116 IEM_MC_ARG(uint16_t, u16Src, 1);
9117 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9119 IEM_MC_LOCAL(int16_t, i16AddrAdj);
9120
9121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9122 if (pImpl->pfnLockedU16)
9123 IEMOP_HLP_DONE_DECODING();
9124 else
9125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9126 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9127 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
9128 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
9129 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
9130 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
9131 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
9132 IEM_MC_FETCH_EFLAGS(EFlags);
9133
9134 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9135 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9136 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9137 else
9138 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9139 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9140
9141 IEM_MC_COMMIT_EFLAGS(EFlags);
9142 IEM_MC_ADVANCE_RIP();
9143 IEM_MC_END();
9144 return VINF_SUCCESS;
9145
9146 case IEMMODE_32BIT:
9147 IEM_MC_BEGIN(3, 2);
9148 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9149 IEM_MC_ARG(uint32_t, u32Src, 1);
9150 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9152 IEM_MC_LOCAL(int32_t, i32AddrAdj);
9153
9154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9155 if (pImpl->pfnLockedU16)
9156 IEMOP_HLP_DONE_DECODING();
9157 else
9158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9159 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9160 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
9161 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
9162 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
9163 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
9164 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
9165 IEM_MC_FETCH_EFLAGS(EFlags);
9166
9167 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9168 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9169 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9170 else
9171 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9172 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9173
9174 IEM_MC_COMMIT_EFLAGS(EFlags);
9175 IEM_MC_ADVANCE_RIP();
9176 IEM_MC_END();
9177 return VINF_SUCCESS;
9178
9179 case IEMMODE_64BIT:
9180 IEM_MC_BEGIN(3, 2);
9181 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9182 IEM_MC_ARG(uint64_t, u64Src, 1);
9183 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9185 IEM_MC_LOCAL(int64_t, i64AddrAdj);
9186
9187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9188 if (pImpl->pfnLockedU16)
9189 IEMOP_HLP_DONE_DECODING();
9190 else
9191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9192 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9193 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
9194 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
9195 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
9196 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
9197 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
9198 IEM_MC_FETCH_EFLAGS(EFlags);
9199
9200 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9201 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9203 else
9204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9205 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9206
9207 IEM_MC_COMMIT_EFLAGS(EFlags);
9208 IEM_MC_ADVANCE_RIP();
9209 IEM_MC_END();
9210 return VINF_SUCCESS;
9211
9212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9213 }
9214 }
9215}
9216
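/*
 * Reader's note (illustration only, hypothetical names, not code used by
 * the decoder): the memory forms above treat the Gv operand as a *signed*
 * bit index relative to the effective address.  The SAR/SHL pair folds the
 * element part of the index into the address and leaves the in-element bit
 * position in the masked source argument.  A plain C sketch of the 32-bit
 * case (needs <stdint.h> and <string.h>):
 *
 *      static int BitTest32Sketch(uint8_t const *pbBase, int32_t iBit)
 *      {
 *          int32_t  offAdj = (iBit >> 5) * 4;        // the SAR 5 + SHL 2 above
 *          uint32_t fMask  = UINT32_C(1) << (iBit & 0x1f);
 *          uint32_t uDword;
 *          memcpy(&uDword, pbBase + offAdj, sizeof(uDword));
 *          return (uDword & fMask) != 0;             // the bit that lands in CF
 *      }
 *
 * This is also why negative bit offsets (see the todo above) end up
 * addressing memory below the effective address.
 */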
9217
9218/** Opcode 0x0f 0xa3. */
9219FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9220{
9221 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9222 IEMOP_HLP_MIN_386();
9223 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
9224}
9225
9226
9227/**
9228 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9229 */
9230FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9231{
9232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9233 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9234
9235 if (IEM_IS_MODRM_REG_MODE(bRm))
9236 {
9237 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9239
9240 switch (pVCpu->iem.s.enmEffOpSize)
9241 {
9242 case IEMMODE_16BIT:
9243 IEM_MC_BEGIN(4, 0);
9244 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9245 IEM_MC_ARG(uint16_t, u16Src, 1);
9246 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9247 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9248
9249 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9250 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9251 IEM_MC_REF_EFLAGS(pEFlags);
9252 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9253
9254 IEM_MC_ADVANCE_RIP();
9255 IEM_MC_END();
9256 return VINF_SUCCESS;
9257
9258 case IEMMODE_32BIT:
9259 IEM_MC_BEGIN(4, 0);
9260 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9261 IEM_MC_ARG(uint32_t, u32Src, 1);
9262 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9263 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9264
9265 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9266 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9267 IEM_MC_REF_EFLAGS(pEFlags);
9268 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9269
9270 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9271 IEM_MC_ADVANCE_RIP();
9272 IEM_MC_END();
9273 return VINF_SUCCESS;
9274
9275 case IEMMODE_64BIT:
9276 IEM_MC_BEGIN(4, 0);
9277 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9278 IEM_MC_ARG(uint64_t, u64Src, 1);
9279 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9280 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9281
9282 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9283 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9284 IEM_MC_REF_EFLAGS(pEFlags);
9285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9286
9287 IEM_MC_ADVANCE_RIP();
9288 IEM_MC_END();
9289 return VINF_SUCCESS;
9290
9291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9292 }
9293 }
9294 else
9295 {
9296 switch (pVCpu->iem.s.enmEffOpSize)
9297 {
9298 case IEMMODE_16BIT:
9299 IEM_MC_BEGIN(4, 2);
9300 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9301 IEM_MC_ARG(uint16_t, u16Src, 1);
9302 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9305
9306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9307 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9308 IEM_MC_ASSIGN(cShiftArg, cShift);
9309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9310 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9311 IEM_MC_FETCH_EFLAGS(EFlags);
9312 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9313 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9314
9315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9316 IEM_MC_COMMIT_EFLAGS(EFlags);
9317 IEM_MC_ADVANCE_RIP();
9318 IEM_MC_END();
9319 return VINF_SUCCESS;
9320
9321 case IEMMODE_32BIT:
9322 IEM_MC_BEGIN(4, 2);
9323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9324 IEM_MC_ARG(uint32_t, u32Src, 1);
9325 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9326 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9328
9329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9331 IEM_MC_ASSIGN(cShiftArg, cShift);
9332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9333 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9334 IEM_MC_FETCH_EFLAGS(EFlags);
9335 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9336 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9337
9338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9339 IEM_MC_COMMIT_EFLAGS(EFlags);
9340 IEM_MC_ADVANCE_RIP();
9341 IEM_MC_END();
9342 return VINF_SUCCESS;
9343
9344 case IEMMODE_64BIT:
9345 IEM_MC_BEGIN(4, 2);
9346 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9347 IEM_MC_ARG(uint64_t, u64Src, 1);
9348 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9349 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9351
9352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9353 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9354 IEM_MC_ASSIGN(cShiftArg, cShift);
9355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9356 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9357 IEM_MC_FETCH_EFLAGS(EFlags);
9358 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9359 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9360
9361 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9362 IEM_MC_COMMIT_EFLAGS(EFlags);
9363 IEM_MC_ADVANCE_RIP();
9364 IEM_MC_END();
9365 return VINF_SUCCESS;
9366
9367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9368 }
9369 }
9370}
9371
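/*
 * Reader's note (sketch, hypothetical name; the real work is done by the
 * iemAImpl workers): for a count c in 1..width-1, SHLD and SHRD compute
 * roughly
 *
 *      dst = (dst << c) | (src >> (width - c));      // shld
 *      dst = (dst >> c) | (src << (width - c));      // shrd
 *
 * e.g. for the 32-bit shld case in plain C:
 *
 *      static uint32_t Shld32Sketch(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *      {
 *          cShift &= 31;                             // count is masked mod 32
 *          if (!cShift)
 *              return uDst;                          // count 0: nothing changes
 *          return (uDst << cShift) | (uSrc >> (32 - cShift));
 *      }
 *
 * The flag details, and the 16-bit case with counts above 15, are left to
 * the per-CPU eflags workers selected by the callers below, which is why
 * AF and OF are declared undefined above.
 */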
9372
9373/**
9374 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9375 */
9376FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9377{
9378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9379 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9380
9381 if (IEM_IS_MODRM_REG_MODE(bRm))
9382 {
9383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9384
9385 switch (pVCpu->iem.s.enmEffOpSize)
9386 {
9387 case IEMMODE_16BIT:
9388 IEM_MC_BEGIN(4, 0);
9389 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9390 IEM_MC_ARG(uint16_t, u16Src, 1);
9391 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9392 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9393
9394 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9395 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9396 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9397 IEM_MC_REF_EFLAGS(pEFlags);
9398 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9399
9400 IEM_MC_ADVANCE_RIP();
9401 IEM_MC_END();
9402 return VINF_SUCCESS;
9403
9404 case IEMMODE_32BIT:
9405 IEM_MC_BEGIN(4, 0);
9406 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9407 IEM_MC_ARG(uint32_t, u32Src, 1);
9408 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9409 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9410
9411 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9412 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9413 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9414 IEM_MC_REF_EFLAGS(pEFlags);
9415 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9416
9417 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9418 IEM_MC_ADVANCE_RIP();
9419 IEM_MC_END();
9420 return VINF_SUCCESS;
9421
9422 case IEMMODE_64BIT:
9423 IEM_MC_BEGIN(4, 0);
9424 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9425 IEM_MC_ARG(uint64_t, u64Src, 1);
9426 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9427 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9428
9429 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9430 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9431 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9432 IEM_MC_REF_EFLAGS(pEFlags);
9433 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9434
9435 IEM_MC_ADVANCE_RIP();
9436 IEM_MC_END();
9437 return VINF_SUCCESS;
9438
9439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9440 }
9441 }
9442 else
9443 {
9444 switch (pVCpu->iem.s.enmEffOpSize)
9445 {
9446 case IEMMODE_16BIT:
9447 IEM_MC_BEGIN(4, 2);
9448 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9449 IEM_MC_ARG(uint16_t, u16Src, 1);
9450 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9451 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9453
9454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9456 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9457 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9458 IEM_MC_FETCH_EFLAGS(EFlags);
9459 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9460 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9461
9462 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9463 IEM_MC_COMMIT_EFLAGS(EFlags);
9464 IEM_MC_ADVANCE_RIP();
9465 IEM_MC_END();
9466 return VINF_SUCCESS;
9467
9468 case IEMMODE_32BIT:
9469 IEM_MC_BEGIN(4, 2);
9470 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9471 IEM_MC_ARG(uint32_t, u32Src, 1);
9472 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9473 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9475
9476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9478 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9479 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9480 IEM_MC_FETCH_EFLAGS(EFlags);
9481 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9482 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9483
9484 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9485 IEM_MC_COMMIT_EFLAGS(EFlags);
9486 IEM_MC_ADVANCE_RIP();
9487 IEM_MC_END();
9488 return VINF_SUCCESS;
9489
9490 case IEMMODE_64BIT:
9491 IEM_MC_BEGIN(4, 2);
9492 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9493 IEM_MC_ARG(uint64_t, u64Src, 1);
9494 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9495 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9497
9498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9500 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9501 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9502 IEM_MC_FETCH_EFLAGS(EFlags);
9503 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9504 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9505
9506 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9507 IEM_MC_COMMIT_EFLAGS(EFlags);
9508 IEM_MC_ADVANCE_RIP();
9509 IEM_MC_END();
9510 return VINF_SUCCESS;
9511
9512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9513 }
9514 }
9515}
9516
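/*
 * Reader's note: the CL variants fetch the count at execution time, and the
 * worker passes CL through unmasked.  Architecturally the count is reduced
 * before use, roughly (assumption: the masking is done inside the iemAImpl
 * workers, since the code above does not repeat it):
 *
 *      cShift &= pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT ? 63 : 31;
 */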
9517
9518
9519/** Opcode 0x0f 0xa4. */
9520FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9521{
9522 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9523 IEMOP_HLP_MIN_386();
9524 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9525}
9526
9527
9528/** Opcode 0x0f 0xa5. */
9529FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9530{
9531 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9532 IEMOP_HLP_MIN_386();
9533 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9534}
9535
9536
9537/** Opcode 0x0f 0xa8. */
9538FNIEMOP_DEF(iemOp_push_gs)
9539{
9540 IEMOP_MNEMONIC(push_gs, "push gs");
9541 IEMOP_HLP_MIN_386();
9542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9543 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9544}
9545
9546
9547/** Opcode 0x0f 0xa9. */
9548FNIEMOP_DEF(iemOp_pop_gs)
9549{
9550 IEMOP_MNEMONIC(pop_gs, "pop gs");
9551 IEMOP_HLP_MIN_386();
9552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9553 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9554}
9555
9556
9557/** Opcode 0x0f 0xaa. */
9558FNIEMOP_DEF(iemOp_rsm)
9559{
9560 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9561 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9563 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
9564}
9565
9566
9567
9568/** Opcode 0x0f 0xab. */
9569FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9570{
9571 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9572 IEMOP_HLP_MIN_386();
9573 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
9574}
9575
9576
9577/** Opcode 0x0f 0xac. */
9578FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9579{
9580 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9581 IEMOP_HLP_MIN_386();
9582 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9583}
9584
9585
9586/** Opcode 0x0f 0xad. */
9587FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9588{
9589 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9590 IEMOP_HLP_MIN_386();
9591 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9592}
9593
9594
9595/** Opcode 0x0f 0xae mem/0. */
9596FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9597{
9598 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9599 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9600 return IEMOP_RAISE_INVALID_OPCODE();
9601
9602 IEM_MC_BEGIN(3, 1);
9603 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9604 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9605 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9608 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9609 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9610 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9611 IEM_MC_END();
9612 return VINF_SUCCESS;
9613}
9614
9615
9616/** Opcode 0x0f 0xae mem/1. */
9617FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9618{
9619 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9620 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9621 return IEMOP_RAISE_INVALID_OPCODE();
9622
9623 IEM_MC_BEGIN(3, 1);
9624 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9625 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9626 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9630 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9631 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9632 IEM_MC_END();
9633 return VINF_SUCCESS;
9634}
9635
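/*
 * Reader's note (abridged sketch of the 512-byte FXSAVE/FXRSTOR image as
 * documented in the CPU manuals; illustrative field names, 64-bit FIP/FDP
 * form shown, the legacy form splits them into 32-bit offset + selector):
 *
 *      typedef struct FXSAVEAREASKETCH
 *      {
 *          uint16_t u16Fcw;           // +0x000: x87 control word
 *          uint16_t u16Fsw;           // +0x002: x87 status word
 *          uint8_t  u8FtwAbridged;    // +0x004: abridged x87 tag word
 *          uint8_t  u8Rsvd;
 *          uint16_t u16Fop;           // +0x006: last x87 opcode
 *          uint64_t u64Fip;           // +0x008: last x87 instruction pointer
 *          uint64_t u64Fdp;           // +0x010: last x87 data pointer
 *          uint32_t u32Mxcsr;         // +0x018: MXCSR
 *          uint32_t u32MxcsrMask;     // +0x01c: writable MXCSR bits
 *          uint8_t  abStMm[8][16];    // +0x020: ST0..ST7 / MM0..MM7
 *          uint8_t  abXmm[16][16];    // +0x0a0: XMM0..XMM15 (8 in 32-bit mode)
 *          uint8_t  abRsvd[96];       // +0x1a0: reserved / available
 *      } FXSAVEAREASKETCH;            // sizeof == 512
 *
 * The structure the real code uses is X86FXSTATE from iprt/x86.h.
 */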
9636
9637/**
9638 * @opmaps grp15
9639 * @opcode !11/2
9640 * @oppfx none
9641 * @opcpuid sse
9642 * @opgroup og_sse_mxcsrsm
9643 * @opxcpttype 5
9644 * @optest op1=0 -> mxcsr=0
9645 * @optest op1=0x2083 -> mxcsr=0x2083
9646 * @optest op1=0xfffffffe -> value.xcpt=0xd
9647 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9648 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9649 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9650 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9651 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9652 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9653 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9654 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9655 */
9656FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9657{
9658 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9659 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9660 return IEMOP_RAISE_INVALID_OPCODE();
9661
9662 IEM_MC_BEGIN(2, 0);
9663 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9664 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9667 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9668 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9669 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9670 IEM_MC_END();
9671 return VINF_SUCCESS;
9672}
9673
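/*
 * Reader's note (sketch of the architectural check behind the
 * op1=0xfffffffe -> value.xcpt=0xd test above; hypothetical name, the real
 * check lives in iemCImpl_ldmxcsr):
 *
 *      static VBOXSTRICTRC LdMxcsrCheckSketch(PVMCPUCC pVCpu, uint32_t uNewMxcsr, uint32_t fMxcsrMask)
 *      {
 *          if (uNewMxcsr & ~fMxcsrMask)              // reserved bit set?
 *              return iemRaiseGeneralProtectionFault0(pVCpu);  // #GP(0)
 *          return VINF_SUCCESS;
 *      }
 *
 * fMxcsrMask corresponds to the MXCSR_MASK field of the FXSAVE image,
 * typically 0xffff.
 */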
9674
9675/**
9676 * @opmaps grp15
9677 * @opcode !11/3
9678 * @oppfx none
9679 * @opcpuid sse
9680 * @opgroup og_sse_mxcsrsm
9681 * @opxcpttype 5
9682 * @optest mxcsr=0 -> op1=0
9683 * @optest mxcsr=0x2083 -> op1=0x2083
9684 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9685 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9686 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9687 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9688 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9689 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9690 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9691 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9692 */
9693FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9694{
9695 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9696 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9697 return IEMOP_RAISE_INVALID_OPCODE();
9698
9699 IEM_MC_BEGIN(2, 0);
9700 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9701 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9705 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9706 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9707 IEM_MC_END();
9708 return VINF_SUCCESS;
9709}
9710
9711
9712/**
9713 * @opmaps grp15
9714 * @opcode !11/4
9715 * @oppfx none
9716 * @opcpuid xsave
9717 * @opgroup og_system
9718 * @opxcpttype none
9719 */
9720FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9721{
9722 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9723 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9724 return IEMOP_RAISE_INVALID_OPCODE();
9725
9726 IEM_MC_BEGIN(3, 0);
9727 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9728 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9729 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9732 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9733 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9734 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9735 IEM_MC_END();
9736 return VINF_SUCCESS;
9737}
9738
9739
9740/**
9741 * @opmaps grp15
9742 * @opcode !11/5
9743 * @oppfx none
9744 * @opcpuid xsave
9745 * @opgroup og_system
9746 * @opxcpttype none
9747 */
9748FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9749{
9750 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9751 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9752 return IEMOP_RAISE_INVALID_OPCODE();
9753
9754 IEM_MC_BEGIN(3, 0);
9755 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9756 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9757 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9760 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9761 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9762 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9763 IEM_MC_END();
9764 return VINF_SUCCESS;
9765}
9766
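/*
 * Reader's note (sketch, hypothetical names): XSAVE and XRSTOR operate on
 * the requested-feature bitmap formed from EDX:EAX and XCR0; state
 * components outside the mask are neither saved nor restored:
 *
 *      uint64_t const fRfbm = (((uint64_t)uEdx << 32) | uEax) & uXcr0;
 *
 * The component logic (x87, SSE, the XSAVE header, etc.) is implemented by
 * iemCImpl_xsave and iemCImpl_xrstor.
 */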
9767/** Opcode 0x0f 0xae mem/6. */
9768FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9769
9770/**
9771 * @opmaps grp15
9772 * @opcode !11/7
9773 * @oppfx none
9774 * @opcpuid clfsh
9775 * @opgroup og_cachectl
9776 * @optest op1=1 ->
9777 */
9778FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9779{
9780 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9781 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9782 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9783
9784 IEM_MC_BEGIN(2, 0);
9785 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9786 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9790 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9791 IEM_MC_END();
9792 return VINF_SUCCESS;
9793}
9794
9795/**
9796 * @opmaps grp15
9797 * @opcode !11/7
9798 * @oppfx 0x66
9799 * @opcpuid clflushopt
9800 * @opgroup og_cachectl
9801 * @optest op1=1 ->
9802 */
9803FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9804{
9805 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9806 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9807 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9808
9809 IEM_MC_BEGIN(2, 0);
9810 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9814 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9815 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9816 IEM_MC_END();
9817 return VINF_SUCCESS;
9818}
9819
9820
9821/** Opcode 0x0f 0xae 11b/5. */
9822FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9823{
9824 RT_NOREF_PV(bRm);
9825 IEMOP_MNEMONIC(lfence, "lfence");
9826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9827 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9828 return IEMOP_RAISE_INVALID_OPCODE();
9829
9830 IEM_MC_BEGIN(0, 0);
9831#ifndef RT_ARCH_ARM64
9832 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9833#endif
9834 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9835#ifndef RT_ARCH_ARM64
9836 else
9837 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9838#endif
9839 IEM_MC_ADVANCE_RIP();
9840 IEM_MC_END();
9841 return VINF_SUCCESS;
9842}
9843
9844
9845/** Opcode 0x0f 0xae 11b/6. */
9846FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9847{
9848 RT_NOREF_PV(bRm);
9849 IEMOP_MNEMONIC(mfence, "mfence");
9850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9851 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9852 return IEMOP_RAISE_INVALID_OPCODE();
9853
9854 IEM_MC_BEGIN(0, 0);
9855#ifndef RT_ARCH_ARM64
9856 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9857#endif
9858 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9859#ifndef RT_ARCH_ARM64
9860 else
9861 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9862#endif
9863 IEM_MC_ADVANCE_RIP();
9864 IEM_MC_END();
9865 return VINF_SUCCESS;
9866}
9867
9868
9869/** Opcode 0x0f 0xae 11b/7. */
9870FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9871{
9872 RT_NOREF_PV(bRm);
9873 IEMOP_MNEMONIC(sfence, "sfence");
9874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9875 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9876 return IEMOP_RAISE_INVALID_OPCODE();
9877
9878 IEM_MC_BEGIN(0, 0);
9879#ifndef RT_ARCH_ARM64
9880 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9881#endif
9882 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9883#ifndef RT_ARCH_ARM64
9884 else
9885 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9886#endif
9887 IEM_MC_ADVANCE_RIP();
9888 IEM_MC_END();
9889 return VINF_SUCCESS;
9890}
9891
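/*
 * Reader's note on the RT_ARCH_ARM64 conditionals above: x86 hosts without
 * SSE2 lack the LFENCE/MFENCE/SFENCE instructions, so
 * iemAImpl_alt_mem_fence stands in with another serializing operation
 * (assumption: a LOCKed read-modify-write, which acts as a full fence on
 * x86).  A portable C11 approximation of the strongest of the three:
 *
 *      #include <stdatomic.h>
 *      atomic_thread_fence(memory_order_seq_cst);    // full fence, ~MFENCE
 *
 * LFENCE and SFENCE only order loads resp. stores, so a full fence is
 * always a safe over-approximation when emulating them.
 */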
9892
9893/** Opcode 0xf3 0x0f 0xae 11b/0. */
9894FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9895{
9896 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9898 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9899 {
9900 IEM_MC_BEGIN(1, 0);
9901 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9902 IEM_MC_ARG(uint64_t, u64Dst, 0);
9903 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9904 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9905 IEM_MC_ADVANCE_RIP();
9906 IEM_MC_END();
9907 }
9908 else
9909 {
9910 IEM_MC_BEGIN(1, 0);
9911 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9912 IEM_MC_ARG(uint32_t, u32Dst, 0);
9913 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
9914 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9915 IEM_MC_ADVANCE_RIP();
9916 IEM_MC_END();
9917 }
9918 return VINF_SUCCESS;
9919}
9920
9921
9922/** Opcode 0xf3 0x0f 0xae 11b/1. */
9923FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
9924{
9925 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
9926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9927 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9928 {
9929 IEM_MC_BEGIN(1, 0);
9930 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9931 IEM_MC_ARG(uint64_t, u64Dst, 0);
9932 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
9933 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9934 IEM_MC_ADVANCE_RIP();
9935 IEM_MC_END();
9936 }
9937 else
9938 {
9939 IEM_MC_BEGIN(1, 0);
9940 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9941 IEM_MC_ARG(uint32_t, u32Dst, 0);
9942 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
9943 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
9944 IEM_MC_ADVANCE_RIP();
9945 IEM_MC_END();
9946 }
9947 return VINF_SUCCESS;
9948}
9949
9950
9951/** Opcode 0xf3 0x0f 0xae 11b/2. */
9952FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
9953{
9954 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
9955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9956 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9957 {
9958 IEM_MC_BEGIN(1, 0);
9959 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9960 IEM_MC_ARG(uint64_t, u64Dst, 0);
9961 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9962 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9963 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
9964 IEM_MC_ADVANCE_RIP();
9965 IEM_MC_END();
9966 }
9967 else
9968 {
9969 IEM_MC_BEGIN(1, 0);
9970 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9971 IEM_MC_ARG(uint32_t, u32Dst, 0);
9972 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9973 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
9974 IEM_MC_ADVANCE_RIP();
9975 IEM_MC_END();
9976 }
9977 return VINF_SUCCESS;
9978}
9979
9980
9981/** Opcode 0xf3 0x0f 0xae 11b/3. */
9982FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
9983{
9984 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
9985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9986 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9987 {
9988 IEM_MC_BEGIN(1, 0);
9989 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9990 IEM_MC_ARG(uint64_t, u64Dst, 0);
9991 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9992 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
9993 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
9994 IEM_MC_ADVANCE_RIP();
9995 IEM_MC_END();
9996 }
9997 else
9998 {
9999 IEM_MC_BEGIN(1, 0);
10000 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10001 IEM_MC_ARG(uint32_t, u32Dst, 0);
10002 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10003 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10004 IEM_MC_ADVANCE_RIP();
10005 IEM_MC_END();
10006 }
10007 return VINF_SUCCESS;
10008}
10009
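/*
 * Reader's note (sketch): IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 in the
 * 64-bit WRFSBASE/WRGSBASE paths above rejects non-canonical bases, i.e.
 * values whose bits 63:47 are not a sign-extension of bit 47 (with 48-bit
 * virtual addressing).  In plain C:
 *
 *      static bool IsCanonicalSketch(uint64_t uAddr)
 *      {
 *          return (int64_t)(uAddr << 16) >> 16 == (int64_t)uAddr;
 *      }
 *
 * (X86_IS_CANONICAL in iprt/x86.h is the macro the real code uses.)  The
 * 32-bit paths need no such check since a zero-extended 32-bit value is
 * always canonical.
 */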
10010
10011/**
10012 * Group 15 jump table for register variant.
10013 */
10014IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10015{ /* pfx: none, 066h, 0f3h, 0f2h */
10016 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10017 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10018 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10019 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10020 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10021 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10022 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10023 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10024};
10025AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10026
10027
10028/**
10029 * Group 15 jump table for memory variant.
10030 */
10031IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10032{ /* pfx: none, 066h, 0f3h, 0f2h */
10033 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10034 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10035 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10036 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10037 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10038 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10039 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10040 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10041};
10042AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10043
10044
10045/** Opcode 0x0f 0xae. */
10046FNIEMOP_DEF(iemOp_Grp15)
10047{
10048 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10050 if (IEM_IS_MODRM_REG_MODE(bRm))
10051 /* register, register */
10052 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10053 + pVCpu->iem.s.idxPrefix], bRm);
10054 /* memory, register */
10055 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10056 + pVCpu->iem.s.idxPrefix], bRm);
10057}
10058
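/*
 * Reader's note: both tables are indexed as /reg * 4 + mandatory prefix,
 * with idxPrefix encoding none=0, 066h=1, 0f3h=2, 0f2h=3.  So, e.g., the
 * register form of F3 0F AE /0 dispatches to rdfsbase:
 *
 *      g_apfnGroup15RegReg[0 * 4 + 2] == iemOp_Grp15_rdfsbase
 */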
10059
10060/** Opcode 0x0f 0xaf. */
10061FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10062{
10063 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10064 IEMOP_HLP_MIN_386();
10065 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10066 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
10067}
10068
10069
10070/** Opcode 0x0f 0xb0. */
10071FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10072{
10073 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10074 IEMOP_HLP_MIN_486();
10075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10076
10077 if (IEM_IS_MODRM_REG_MODE(bRm))
10078 {
10079 IEMOP_HLP_DONE_DECODING();
10080 IEM_MC_BEGIN(4, 0);
10081 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10082 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10083 IEM_MC_ARG(uint8_t, u8Src, 2);
10084 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10085
10086 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10087 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10088 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10089 IEM_MC_REF_EFLAGS(pEFlags);
10090 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10091 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10092 else
10093 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10094
10095 IEM_MC_ADVANCE_RIP();
10096 IEM_MC_END();
10097 }
10098 else
10099 {
10100 IEM_MC_BEGIN(4, 3);
10101 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10102 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10103 IEM_MC_ARG(uint8_t, u8Src, 2);
10104 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10106 IEM_MC_LOCAL(uint8_t, u8Al);
10107
10108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10109 IEMOP_HLP_DONE_DECODING();
10110 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10111 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10112 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10113 IEM_MC_FETCH_EFLAGS(EFlags);
10114 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10115 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10116 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10117 else
10118 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10119
10120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10121 IEM_MC_COMMIT_EFLAGS(EFlags);
10122 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10123 IEM_MC_ADVANCE_RIP();
10124 IEM_MC_END();
10125 }
10126 return VINF_SUCCESS;
10127}
10128
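/*
 * Reader's note (sketch of the architectural semantics; hypothetical name,
 * and the real workers compute the full CMP flag set, not just ZF):
 *
 *      static void CmpXchgU8Sketch(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
 *      {
 *          if (*puDst == *puAl)
 *          {
 *              *pfEFlags |= X86_EFL_ZF;
 *              *puDst     = uSrc;                    // equal: store the source
 *          }
 *          else
 *          {
 *              *pfEFlags &= ~X86_EFL_ZF;
 *              *puAl      = *puDst;                  // not equal: load AL from dest
 *          }
 *      }
 *
 * This is why the memory form above copies AL into a local, maps the
 * destination read/write, and stores the possibly updated accumulator back
 * afterwards.
 */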
10129/** Opcode 0x0f 0xb1. */
10130FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10131{
10132 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10133 IEMOP_HLP_MIN_486();
10134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10135
10136 if (IEM_IS_MODRM_REG_MODE(bRm))
10137 {
10138 IEMOP_HLP_DONE_DECODING();
10139 switch (pVCpu->iem.s.enmEffOpSize)
10140 {
10141 case IEMMODE_16BIT:
10142 IEM_MC_BEGIN(4, 0);
10143 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10144 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10145 IEM_MC_ARG(uint16_t, u16Src, 2);
10146 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10147
10148 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10149 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10150 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10151 IEM_MC_REF_EFLAGS(pEFlags);
10152 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10153 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10154 else
10155 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10156
10157 IEM_MC_ADVANCE_RIP();
10158 IEM_MC_END();
10159 return VINF_SUCCESS;
10160
10161 case IEMMODE_32BIT:
10162 IEM_MC_BEGIN(4, 0);
10163 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10164 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10165 IEM_MC_ARG(uint32_t, u32Src, 2);
10166 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10167
10168 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10169 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10170 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10171 IEM_MC_REF_EFLAGS(pEFlags);
10172 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10173 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10174 else
10175 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10176
10177 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10178 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10179 } IEM_MC_ELSE() {
10180 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10181 } IEM_MC_ENDIF();
10182
10183 IEM_MC_ADVANCE_RIP();
10184 IEM_MC_END();
10185 return VINF_SUCCESS;
10186
10187 case IEMMODE_64BIT:
10188 IEM_MC_BEGIN(4, 0);
10189 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10190 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10191#ifdef RT_ARCH_X86
10192 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10193#else
10194 IEM_MC_ARG(uint64_t, u64Src, 2);
10195#endif
10196 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10197
10198 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10199 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10200 IEM_MC_REF_EFLAGS(pEFlags);
10201#ifdef RT_ARCH_X86
10202 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10203 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10204 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10205 else
10206 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10207#else
10208 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10209 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10210 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10211 else
10212 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10213#endif
10214
10215 IEM_MC_ADVANCE_RIP();
10216 IEM_MC_END();
10217 return VINF_SUCCESS;
10218
10219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10220 }
10221 }
10222 else
10223 {
10224 switch (pVCpu->iem.s.enmEffOpSize)
10225 {
10226 case IEMMODE_16BIT:
10227 IEM_MC_BEGIN(4, 3);
10228 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10229 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10230 IEM_MC_ARG(uint16_t, u16Src, 2);
10231 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10233 IEM_MC_LOCAL(uint16_t, u16Ax);
10234
10235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10236 IEMOP_HLP_DONE_DECODING();
10237 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10238 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10239 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10240 IEM_MC_FETCH_EFLAGS(EFlags);
10241 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10242 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10244 else
10245 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10246
10247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10248 IEM_MC_COMMIT_EFLAGS(EFlags);
10249 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10250 IEM_MC_ADVANCE_RIP();
10251 IEM_MC_END();
10252 return VINF_SUCCESS;
10253
10254 case IEMMODE_32BIT:
10255 IEM_MC_BEGIN(4, 3);
10256 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10257 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10258 IEM_MC_ARG(uint32_t, u32Src, 2);
10259 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10261 IEM_MC_LOCAL(uint32_t, u32Eax);
10262
10263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10264 IEMOP_HLP_DONE_DECODING();
10265 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10266 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10267 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10268 IEM_MC_FETCH_EFLAGS(EFlags);
10269 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10270 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10271 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10272 else
10273 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10274
10275 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10276 IEM_MC_COMMIT_EFLAGS(EFlags);
10277 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10278 IEM_MC_ADVANCE_RIP();
10279 IEM_MC_END();
10280 return VINF_SUCCESS;
10281
10282 case IEMMODE_64BIT:
10283 IEM_MC_BEGIN(4, 3);
10284 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10285 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10286#ifdef RT_ARCH_X86
10287 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10288#else
10289 IEM_MC_ARG(uint64_t, u64Src, 2);
10290#endif
10291 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10293 IEM_MC_LOCAL(uint64_t, u64Rax);
10294
10295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10296 IEMOP_HLP_DONE_DECODING();
10297 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10298 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10299 IEM_MC_FETCH_EFLAGS(EFlags);
10300 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10301#ifdef RT_ARCH_X86
10302 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10303 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10304 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10305 else
10306 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10307#else
10308 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10309 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10311 else
10312 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10313#endif
10314
10315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10316 IEM_MC_COMMIT_EFLAGS(EFlags);
10317 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10318 IEM_MC_ADVANCE_RIP();
10319 IEM_MC_END();
10320 return VINF_SUCCESS;
10321
10322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10323 }
10324 }
10325}
10326
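/*
 * Reader's note on the ZF-conditional IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF
 * pair in the 32-bit register path above: a 32-bit GPR write zero-extends
 * into the full 64-bit register, but CMPXCHG writes only one of the two
 * registers per execution, the destination on success (ZF=1) or EAX on
 * failure (ZF=0), so only that one may have its high dword cleared:
 *
 *      // conceptually (hypothetical names):
 *      if (fZf) uDst &= UINT32_MAX;                  // success: dest written
 *      else     uRax &= UINT32_MAX;                  // failure: RAX written
 */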
10327
10328FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
10329{
10330 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
10331 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
10332
10333 switch (pVCpu->iem.s.enmEffOpSize)
10334 {
10335 case IEMMODE_16BIT:
10336 IEM_MC_BEGIN(5, 1);
10337 IEM_MC_ARG(uint16_t, uSel, 0);
10338 IEM_MC_ARG(uint16_t, offSeg, 1);
10339 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10340 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10341 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10342 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10345 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10346 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
10347 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10348 IEM_MC_END();
10349 return VINF_SUCCESS;
10350
10351 case IEMMODE_32BIT:
10352 IEM_MC_BEGIN(5, 1);
10353 IEM_MC_ARG(uint16_t, uSel, 0);
10354 IEM_MC_ARG(uint32_t, offSeg, 1);
10355 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10356 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10357 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10358 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10361 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10362 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
10363 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10364 IEM_MC_END();
10365 return VINF_SUCCESS;
10366
10367 case IEMMODE_64BIT:
10368 IEM_MC_BEGIN(5, 1);
10369 IEM_MC_ARG(uint16_t, uSel, 0);
10370 IEM_MC_ARG(uint64_t, offSeg, 1);
10371 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
10372 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
10373 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
10374 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
10375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10377 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
10378 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10379 else
10380 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
10381 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
10382 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
10383 IEM_MC_END();
10384 return VINF_SUCCESS;
10385
10386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10387 }
10388}
10389
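/*
 * Reader's note: the Mp operand is a far pointer stored as the offset
 * followed by a 16-bit selector, which is why the selector fetch above uses
 * a displacement of 2, 4 or 8 depending on the operand size.  Packed layout
 * sketch for the 32-bit case (hypothetical; the decoder fetches the two
 * parts separately):
 *
 *      struct FarPtr1632Sketch
 *      {
 *          uint32_t off;              // +0: offset
 *          uint16_t uSel;             // +4: segment selector
 *      };
 */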
10390
10391/** Opcode 0x0f 0xb2. */
10392FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10393{
10394 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10395 IEMOP_HLP_MIN_386();
10396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10397 if (IEM_IS_MODRM_REG_MODE(bRm))
10398 return IEMOP_RAISE_INVALID_OPCODE();
10399 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10400}
10401
10402
10403/** Opcode 0x0f 0xb3. */
10404FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10405{
10406 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10407 IEMOP_HLP_MIN_386();
10408 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
10409}
10410
10411
10412/** Opcode 0x0f 0xb4. */
10413FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10414{
10415 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10416 IEMOP_HLP_MIN_386();
10417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10418 if (IEM_IS_MODRM_REG_MODE(bRm))
10419 return IEMOP_RAISE_INVALID_OPCODE();
10420 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10421}
10422
10423
10424/** Opcode 0x0f 0xb5. */
10425FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10426{
10427 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10428 IEMOP_HLP_MIN_386();
10429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10430 if (IEM_IS_MODRM_REG_MODE(bRm))
10431 return IEMOP_RAISE_INVALID_OPCODE();
10432 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10433}
10434
10435
10436/** Opcode 0x0f 0xb6. */
10437FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10438{
10439 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10440 IEMOP_HLP_MIN_386();
10441
10442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10443
10444 /*
10445 * If rm denotes a register, there are no more instruction bytes.
10446 */
10447 if (IEM_IS_MODRM_REG_MODE(bRm))
10448 {
10449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10450 switch (pVCpu->iem.s.enmEffOpSize)
10451 {
10452 case IEMMODE_16BIT:
10453 IEM_MC_BEGIN(0, 1);
10454 IEM_MC_LOCAL(uint16_t, u16Value);
10455 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10456 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10457 IEM_MC_ADVANCE_RIP();
10458 IEM_MC_END();
10459 return VINF_SUCCESS;
10460
10461 case IEMMODE_32BIT:
10462 IEM_MC_BEGIN(0, 1);
10463 IEM_MC_LOCAL(uint32_t, u32Value);
10464 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10465 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10466 IEM_MC_ADVANCE_RIP();
10467 IEM_MC_END();
10468 return VINF_SUCCESS;
10469
10470 case IEMMODE_64BIT:
10471 IEM_MC_BEGIN(0, 1);
10472 IEM_MC_LOCAL(uint64_t, u64Value);
10473 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10474 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10475 IEM_MC_ADVANCE_RIP();
10476 IEM_MC_END();
10477 return VINF_SUCCESS;
10478
10479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10480 }
10481 }
10482 else
10483 {
10484 /*
10485 * We're loading a register from memory.
10486 */
10487 switch (pVCpu->iem.s.enmEffOpSize)
10488 {
10489 case IEMMODE_16BIT:
10490 IEM_MC_BEGIN(0, 2);
10491 IEM_MC_LOCAL(uint16_t, u16Value);
10492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10495 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10496 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10497 IEM_MC_ADVANCE_RIP();
10498 IEM_MC_END();
10499 return VINF_SUCCESS;
10500
10501 case IEMMODE_32BIT:
10502 IEM_MC_BEGIN(0, 2);
10503 IEM_MC_LOCAL(uint32_t, u32Value);
10504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10507 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10509 IEM_MC_ADVANCE_RIP();
10510 IEM_MC_END();
10511 return VINF_SUCCESS;
10512
10513 case IEMMODE_64BIT:
10514 IEM_MC_BEGIN(0, 2);
10515 IEM_MC_LOCAL(uint64_t, u64Value);
10516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10519 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10520 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10521 IEM_MC_ADVANCE_RIP();
10522 IEM_MC_END();
10523 return VINF_SUCCESS;
10524
10525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10526 }
10527 }
10528}
10529
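/*
 * Reader's note: MOVZX is a plain zero-extension; in C terms the register
 * paths above amount to (hypothetical names):
 *
 *      uint16_t u16 = (uint8_t)uSrc;                 // movzx r16, r/m8
 *      uint32_t u32 = (uint8_t)uSrc;                 // movzx r32, r/m8
 *      uint64_t u64 = (uint8_t)uSrc;                 // movzx r64, r/m8
 *
 * The 32-bit form clears the high dword simply by being a 32-bit GPR write.
 */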
10530
10531/** Opcode 0x0f 0xb7. */
10532FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10533{
10534 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10535 IEMOP_HLP_MIN_386();
10536
10537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10538
10539 /** @todo Not entirely sure how the operand size prefix is handled here,
10540 * assuming that it will be ignored. Would be nice to have a few
10541 * tests for this. */
10542 /*
10543 * If rm denotes a register, there are no more instruction bytes.
10544 */
10545 if (IEM_IS_MODRM_REG_MODE(bRm))
10546 {
10547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10548 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10549 {
10550 IEM_MC_BEGIN(0, 1);
10551 IEM_MC_LOCAL(uint32_t, u32Value);
10552 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10553 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10554 IEM_MC_ADVANCE_RIP();
10555 IEM_MC_END();
10556 }
10557 else
10558 {
10559 IEM_MC_BEGIN(0, 1);
10560 IEM_MC_LOCAL(uint64_t, u64Value);
10561 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10562 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10563 IEM_MC_ADVANCE_RIP();
10564 IEM_MC_END();
10565 }
10566 }
10567 else
10568 {
10569 /*
10570 * We're loading a register from memory.
10571 */
10572 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10573 {
10574 IEM_MC_BEGIN(0, 2);
10575 IEM_MC_LOCAL(uint32_t, u32Value);
10576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10579 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10580 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10581 IEM_MC_ADVANCE_RIP();
10582 IEM_MC_END();
10583 }
10584 else
10585 {
10586 IEM_MC_BEGIN(0, 2);
10587 IEM_MC_LOCAL(uint64_t, u64Value);
10588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10591 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10592 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10593 IEM_MC_ADVANCE_RIP();
10594 IEM_MC_END();
10595 }
10596 }
10597 return VINF_SUCCESS;
10598}
10599
10600
10601/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10602FNIEMOP_UD_STUB(iemOp_jmpe);
10603
10604
10605/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10606FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10607{
10608 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10609 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10610 return iemOp_InvalidNeedRM(pVCpu);
10611#ifndef TST_IEM_CHECK_MC
10612# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10613 static const IEMOPBINSIZES s_Native =
10614 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10615# endif
10616 static const IEMOPBINSIZES s_Fallback =
10617 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10618#endif
10619 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
10620}
10621
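/*
 * Reader's note (sketch; hypothetical name, not the actual fallback code):
 * the _fallback workers provide POPCNT on hosts without the instruction.  A
 * classic portable Hamming-weight implementation of the 32-bit case:
 *
 *      static unsigned PopCnt32Sketch(uint32_t u)
 *      {
 *          u = u - ((u >> 1) & UINT32_C(0x55555555));
 *          u = (u & UINT32_C(0x33333333)) + ((u >> 2) & UINT32_C(0x33333333));
 *          u = (u + (u >> 4)) & UINT32_C(0x0f0f0f0f);
 *          return (u * UINT32_C(0x01010101)) >> 24;
 *      }
 *
 * POPCNT also has fixed flag behavior: ZF is set for a zero source while
 * OF, SF, AF, CF and PF are cleared.
 */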
10622
10623/**
10624 * @opcode 0xb9
10625 * @opinvalid intel-modrm
10626 * @optest ->
10627 */
10628FNIEMOP_DEF(iemOp_Grp10)
10629{
10630 /*
10631 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also decodes
10632 * the modr/m byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10633 */
10634 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10635 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10636 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10637}
10638
10639
10640/** Opcode 0x0f 0xba. */
10641FNIEMOP_DEF(iemOp_Grp8)
10642{
10643 IEMOP_HLP_MIN_386();
10644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10645 PCIEMOPBINSIZES pImpl;
10646 switch (IEM_GET_MODRM_REG_8(bRm))
10647 {
10648 case 0: case 1: case 2: case 3:
10649 /* Both AMD and Intel want full modr/m decoding and imm8. */
10650 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
10651 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
10652 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
10653 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
10654 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
10655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10656 }
10657 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10658
10659 if (IEM_IS_MODRM_REG_MODE(bRm))
10660 {
10661 /* register destination. */
10662 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10664
10665 switch (pVCpu->iem.s.enmEffOpSize)
10666 {
10667 case IEMMODE_16BIT:
10668 IEM_MC_BEGIN(3, 0);
10669 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10670 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
10671 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10672
10673 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10674 IEM_MC_REF_EFLAGS(pEFlags);
10675 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10676
10677 IEM_MC_ADVANCE_RIP();
10678 IEM_MC_END();
10679 return VINF_SUCCESS;
10680
10681 case IEMMODE_32BIT:
10682 IEM_MC_BEGIN(3, 0);
10683 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10684 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
10685 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10686
10687 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10688 IEM_MC_REF_EFLAGS(pEFlags);
10689 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10690
10691 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10692 IEM_MC_ADVANCE_RIP();
10693 IEM_MC_END();
10694 return VINF_SUCCESS;
10695
10696 case IEMMODE_64BIT:
10697 IEM_MC_BEGIN(3, 0);
10698 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10699 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
10700 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10701
10702 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10703 IEM_MC_REF_EFLAGS(pEFlags);
10704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10705
10706 IEM_MC_ADVANCE_RIP();
10707 IEM_MC_END();
10708 return VINF_SUCCESS;
10709
10710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10711 }
10712 }
10713 else
10714 {
10715 /* memory destination. */
10716
10717 uint32_t fAccess;
10718 if (pImpl->pfnLockedU16)
10719 fAccess = IEM_ACCESS_DATA_RW;
10720 else /* BT */
10721 fAccess = IEM_ACCESS_DATA_R;
10722
10723 /** @todo test negative bit offsets! */
10724 switch (pVCpu->iem.s.enmEffOpSize)
10725 {
10726 case IEMMODE_16BIT:
10727 IEM_MC_BEGIN(3, 1);
10728 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10729 IEM_MC_ARG(uint16_t, u16Src, 1);
10730 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10732
10733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10734 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10735 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
10736 if (pImpl->pfnLockedU16)
10737 IEMOP_HLP_DONE_DECODING();
10738 else
10739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10740 IEM_MC_FETCH_EFLAGS(EFlags);
10741 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10742 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10743 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10744 else
10745 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10746 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10747
10748 IEM_MC_COMMIT_EFLAGS(EFlags);
10749 IEM_MC_ADVANCE_RIP();
10750 IEM_MC_END();
10751 return VINF_SUCCESS;
10752
10753 case IEMMODE_32BIT:
10754 IEM_MC_BEGIN(3, 1);
10755 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10756 IEM_MC_ARG(uint32_t, u32Src, 1);
10757 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10759
10760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10761 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10762 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
10763 if (pImpl->pfnLockedU16)
10764 IEMOP_HLP_DONE_DECODING();
10765 else
10766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10767 IEM_MC_FETCH_EFLAGS(EFlags);
10768 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10769 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10770 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10771 else
10772 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10774
10775 IEM_MC_COMMIT_EFLAGS(EFlags);
10776 IEM_MC_ADVANCE_RIP();
10777 IEM_MC_END();
10778 return VINF_SUCCESS;
10779
10780 case IEMMODE_64BIT:
10781 IEM_MC_BEGIN(3, 1);
10782 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10783 IEM_MC_ARG(uint64_t, u64Src, 1);
10784 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10786
10787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10788 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
10789 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
10790 if (pImpl->pfnLockedU16)
10791 IEMOP_HLP_DONE_DECODING();
10792 else
10793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10794 IEM_MC_FETCH_EFLAGS(EFlags);
10795 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
10796 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10797 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10798 else
10799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10800 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10801
10802 IEM_MC_COMMIT_EFLAGS(EFlags);
10803 IEM_MC_ADVANCE_RIP();
10804 IEM_MC_END();
10805 return VINF_SUCCESS;
10806
10807 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10808 }
10809 }
10810}
10811
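/*
 * Illustrative sketch (exposition only, not decoder code; the helper name is
 * invented): a plain C model of the immediate-form bit test handled above.
 * The point is the masking: BT/BTS/BTR/BTC Ev,Ib reduce the immediate bit
 * offset modulo the operand width (& 0x0f / 0x1f / 0x3f), so unlike the
 * register-offset forms they can never reach outside the addressed
 * 16/32/64-bit unit.
 */
static bool iemExampleSketchBtU32(uint32_t uDst, uint8_t u8Bit)
{
    unsigned const iBit = u8Bit & 0x1f;     /* same masking as the 32-bit case above */
    return (uDst >> iBit) & 1;              /* the selected bit is what lands in CF */
}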
10812
10813/** Opcode 0x0f 0xbb. */
10814FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10815{
10816 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10817 IEMOP_HLP_MIN_386();
10818 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
10819}
10820
10821
10822/**
10823 * Common worker for BSF and BSR instructions.
10824 *
10825 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10826 * the destination register, which means that for 32-bit operations the high
10827 * bits must be left alone.
10828 *
10829 * @param pImpl Pointer to the instruction implementation (assembly).
10830 */
10831FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10832{
10833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10834
10835 /*
10836 * If rm is denoting a register, no more instruction bytes.
10837 */
10838 if (IEM_IS_MODRM_REG_MODE(bRm))
10839 {
10840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10841 switch (pVCpu->iem.s.enmEffOpSize)
10842 {
10843 case IEMMODE_16BIT:
10844 IEM_MC_BEGIN(3, 0);
10845 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10846 IEM_MC_ARG(uint16_t, u16Src, 1);
10847 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10848
10849 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10850 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10851 IEM_MC_REF_EFLAGS(pEFlags);
10852 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10853
10854 IEM_MC_ADVANCE_RIP();
10855 IEM_MC_END();
10856 break;
10857
10858 case IEMMODE_32BIT:
10859 IEM_MC_BEGIN(3, 0);
10860 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10861 IEM_MC_ARG(uint32_t, u32Src, 1);
10862 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10863
10864 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10865 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10866 IEM_MC_REF_EFLAGS(pEFlags);
10867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10868 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10869 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10870 IEM_MC_ENDIF();
10871 IEM_MC_ADVANCE_RIP();
10872 IEM_MC_END();
10873 break;
10874
10875 case IEMMODE_64BIT:
10876 IEM_MC_BEGIN(3, 0);
10877 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10878 IEM_MC_ARG(uint64_t, u64Src, 1);
10879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10880
10881 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10882 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10883 IEM_MC_REF_EFLAGS(pEFlags);
10884 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10885
10886 IEM_MC_ADVANCE_RIP();
10887 IEM_MC_END();
10888 break;
10889 }
10890 }
10891 else
10892 {
10893 /*
10894 * We're accessing memory.
10895 */
10896 switch (pVCpu->iem.s.enmEffOpSize)
10897 {
10898 case IEMMODE_16BIT:
10899 IEM_MC_BEGIN(3, 1);
10900 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10901 IEM_MC_ARG(uint16_t, u16Src, 1);
10902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10904
10905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10907 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10908 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10909 IEM_MC_REF_EFLAGS(pEFlags);
10910 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10911
10912 IEM_MC_ADVANCE_RIP();
10913 IEM_MC_END();
10914 break;
10915
10916 case IEMMODE_32BIT:
10917 IEM_MC_BEGIN(3, 1);
10918 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10919 IEM_MC_ARG(uint32_t, u32Src, 1);
10920 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10922
10923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10925 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10926 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10927 IEM_MC_REF_EFLAGS(pEFlags);
10928 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10929
10930 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10931 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10932 IEM_MC_ENDIF();
10933 IEM_MC_ADVANCE_RIP();
10934 IEM_MC_END();
10935 break;
10936
10937 case IEMMODE_64BIT:
10938 IEM_MC_BEGIN(3, 1);
10939 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10940 IEM_MC_ARG(uint64_t, u64Src, 1);
10941 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10943
10944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10946 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10947 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10948 IEM_MC_REF_EFLAGS(pEFlags);
10949 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10950
10951 IEM_MC_ADVANCE_RIP();
10952 IEM_MC_END();
10953 break;
10954 }
10955 }
10956 return VINF_SUCCESS;
10957}
10958
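/*
 * Illustrative sketch (exposition only, not decoder code; the helper name is
 * invented): the BSF behaviour the worker above models and the reason for the
 * conditional high-dword clearing.  With a zero source, ZF is set and the
 * destination is not written (architecturally undefined on Intel, though real
 * CPUs tend to leave it unchanged), so the usual implicit zeroing of bits
 * 63:32 must be suppressed in that case.
 */
static void iemExampleSketchBsfU32(uint32_t *puDst, uint32_t uSrc, bool *pfZf)
{
    *pfZf = uSrc == 0;
    if (uSrc)
    {
        unsigned iBit = 0;
        while (!(uSrc & 1))                 /* scan forward for the lowest set bit */
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst = iBit;                      /* only written when the source is non-zero */
    }
}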
10959
10960/** Opcode 0x0f 0xbc. */
10961FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
10962{
10963 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
10964 IEMOP_HLP_MIN_386();
10965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10966 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
10967}
10968
10969
10970/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
10971FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
10972{
10973 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
10974 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
10975 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10976
10977#ifndef TST_IEM_CHECK_MC
10978 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
10979 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
10980 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
10981 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
10982 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
10983 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
10984 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
10985 {
10986 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
10987 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
10988 };
10989#endif
10990 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
10991 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
10992 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
10993}
10994
10995
10996/** Opcode 0x0f 0xbd. */
10997FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
10998{
10999 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11000 IEMOP_HLP_MIN_386();
11001 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11002 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11003}
11004
11005
11006/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11007FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11008{
11009 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11010 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11011 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11012
11013#ifndef TST_IEM_CHECK_MC
11014 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11015 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11016 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11017 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11018 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11019 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11020 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11021 {
11022 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11023 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11024 };
11025#endif
11026 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11027 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
11028 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
11029}
11030
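/*
 * Illustrative sketch (exposition only; the helper name is invented): what
 * separates TZCNT/LZCNT from BSF/BSR and thus why they need the separate
 * worker tables above.  The count instructions are fully defined for a zero
 * source (result = operand width, CF set) and always write the destination;
 * LZCNT mirrors this from the most significant end.
 */
static uint32_t iemExampleSketchTzCntU32(uint32_t uSrc, bool *pfCf)
{
    *pfCf = uSrc == 0;                      /* CF flags the all-zero source */
    if (!uSrc)
        return 32;                          /* defined result, unlike BSF */
    uint32_t cZeros = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        cZeros++;
    }
    return cZeros;
}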
11031
11032
11033/** Opcode 0x0f 0xbe. */
11034FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11035{
11036 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11037 IEMOP_HLP_MIN_386();
11038
11039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11040
11041 /*
11042 * If rm is denoting a register, no more instruction bytes.
11043 */
11044 if (IEM_IS_MODRM_REG_MODE(bRm))
11045 {
11046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11047 switch (pVCpu->iem.s.enmEffOpSize)
11048 {
11049 case IEMMODE_16BIT:
11050 IEM_MC_BEGIN(0, 1);
11051 IEM_MC_LOCAL(uint16_t, u16Value);
11052 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11053 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11054 IEM_MC_ADVANCE_RIP();
11055 IEM_MC_END();
11056 return VINF_SUCCESS;
11057
11058 case IEMMODE_32BIT:
11059 IEM_MC_BEGIN(0, 1);
11060 IEM_MC_LOCAL(uint32_t, u32Value);
11061 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11062 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11063 IEM_MC_ADVANCE_RIP();
11064 IEM_MC_END();
11065 return VINF_SUCCESS;
11066
11067 case IEMMODE_64BIT:
11068 IEM_MC_BEGIN(0, 1);
11069 IEM_MC_LOCAL(uint64_t, u64Value);
11070 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11071 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11072 IEM_MC_ADVANCE_RIP();
11073 IEM_MC_END();
11074 return VINF_SUCCESS;
11075
11076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11077 }
11078 }
11079 else
11080 {
11081 /*
11082 * We're loading a register from memory.
11083 */
11084 switch (pVCpu->iem.s.enmEffOpSize)
11085 {
11086 case IEMMODE_16BIT:
11087 IEM_MC_BEGIN(0, 2);
11088 IEM_MC_LOCAL(uint16_t, u16Value);
11089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11092 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11093 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11094 IEM_MC_ADVANCE_RIP();
11095 IEM_MC_END();
11096 return VINF_SUCCESS;
11097
11098 case IEMMODE_32BIT:
11099 IEM_MC_BEGIN(0, 2);
11100 IEM_MC_LOCAL(uint32_t, u32Value);
11101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11104 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11105 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11106 IEM_MC_ADVANCE_RIP();
11107 IEM_MC_END();
11108 return VINF_SUCCESS;
11109
11110 case IEMMODE_64BIT:
11111 IEM_MC_BEGIN(0, 2);
11112 IEM_MC_LOCAL(uint64_t, u64Value);
11113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11116 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11117 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11118 IEM_MC_ADVANCE_RIP();
11119 IEM_MC_END();
11120 return VINF_SUCCESS;
11121
11122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11123 }
11124 }
11125}
11126
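/*
 * Illustrative sketch (exposition only; the helper name is invented): the
 * sign extension movsx Gv,Eb performs, shown for the 8-bit to 32-bit case;
 * the 16- and 64-bit destinations differ only in width.
 */
static uint32_t iemExampleSketchMovsxU8ToU32(uint8_t u8Src)
{
    return (uint32_t)(int32_t)(int8_t)u8Src;    /* replicate bit 7 into bits 8 thru 31 */
}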
11127
11128/** Opcode 0x0f 0xbf. */
11129FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11130{
11131 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11132 IEMOP_HLP_MIN_386();
11133
11134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11135
11136 /** @todo Not entirely sure how the operand size prefix is handled here,
11137 * assuming that it will be ignored. Would be nice to have a few
11138 * tests for this. */
11139 /*
11140 * If rm is denoting a register, no more instruction bytes.
11141 */
11142 if (IEM_IS_MODRM_REG_MODE(bRm))
11143 {
11144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11145 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11146 {
11147 IEM_MC_BEGIN(0, 1);
11148 IEM_MC_LOCAL(uint32_t, u32Value);
11149 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11150 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11151 IEM_MC_ADVANCE_RIP();
11152 IEM_MC_END();
11153 }
11154 else
11155 {
11156 IEM_MC_BEGIN(0, 1);
11157 IEM_MC_LOCAL(uint64_t, u64Value);
11158 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11159 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11160 IEM_MC_ADVANCE_RIP();
11161 IEM_MC_END();
11162 }
11163 }
11164 else
11165 {
11166 /*
11167 * We're loading a register from memory.
11168 */
11169 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11170 {
11171 IEM_MC_BEGIN(0, 2);
11172 IEM_MC_LOCAL(uint32_t, u32Value);
11173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11176 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11177 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11178 IEM_MC_ADVANCE_RIP();
11179 IEM_MC_END();
11180 }
11181 else
11182 {
11183 IEM_MC_BEGIN(0, 2);
11184 IEM_MC_LOCAL(uint64_t, u64Value);
11185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11188 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11189 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11190 IEM_MC_ADVANCE_RIP();
11191 IEM_MC_END();
11192 }
11193 }
11194 return VINF_SUCCESS;
11195}
11196
11197
11198/** Opcode 0x0f 0xc0. */
11199FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11200{
11201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11202 IEMOP_HLP_MIN_486();
11203 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11204
11205 /*
11206 * If rm is denoting a register, no more instruction bytes.
11207 */
11208 if (IEM_IS_MODRM_REG_MODE(bRm))
11209 {
11210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11211
11212 IEM_MC_BEGIN(3, 0);
11213 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11214 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11216
11217 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11218 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11219 IEM_MC_REF_EFLAGS(pEFlags);
11220 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11221
11222 IEM_MC_ADVANCE_RIP();
11223 IEM_MC_END();
11224 }
11225 else
11226 {
11227 /*
11228 * We're accessing memory.
11229 */
11230 IEM_MC_BEGIN(3, 3);
11231 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11232 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11233 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11234 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11236
11237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11238 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11239 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11240 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11241 IEM_MC_FETCH_EFLAGS(EFlags);
11242 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11243 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11244 else
11245 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11246
11247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11248 IEM_MC_COMMIT_EFLAGS(EFlags);
11249 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11250 IEM_MC_ADVANCE_RIP();
11251 IEM_MC_END();
11252 return VINF_SUCCESS;
11253 }
11254 return VINF_SUCCESS;
11255}
11256
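/*
 * Illustrative sketch (exposition only, flags and locking omitted; the helper
 * name is invented): the exchange-and-add performed by the xadd workers
 * called above, in the operand order the SDM gives (TEMP <- SRC + DEST;
 * SRC <- DEST; DEST <- TEMP), which also covers the aliased xadd rX,rX case.
 */
static void iemExampleSketchXaddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8Tmp = (uint8_t)(*pu8Dst + *pu8Reg); /* TEMP <- SRC + DEST */
    *pu8Reg = *pu8Dst;                                  /* SRC  <- DEST */
    *pu8Dst = u8Tmp;                                    /* DEST <- TEMP */
}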
11257
11258/** Opcode 0x0f 0xc1. */
11259FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11260{
11261 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11262 IEMOP_HLP_MIN_486();
11263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11264
11265 /*
11266 * If rm is denoting a register, no more instruction bytes.
11267 */
11268 if (IEM_IS_MODRM_REG_MODE(bRm))
11269 {
11270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11271
11272 switch (pVCpu->iem.s.enmEffOpSize)
11273 {
11274 case IEMMODE_16BIT:
11275 IEM_MC_BEGIN(3, 0);
11276 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11277 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11278 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11279
11280 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11281 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11282 IEM_MC_REF_EFLAGS(pEFlags);
11283 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11284
11285 IEM_MC_ADVANCE_RIP();
11286 IEM_MC_END();
11287 return VINF_SUCCESS;
11288
11289 case IEMMODE_32BIT:
11290 IEM_MC_BEGIN(3, 0);
11291 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11292 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11293 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11294
11295 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11296 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11297 IEM_MC_REF_EFLAGS(pEFlags);
11298 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11299
11300 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11301 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11302 IEM_MC_ADVANCE_RIP();
11303 IEM_MC_END();
11304 return VINF_SUCCESS;
11305
11306 case IEMMODE_64BIT:
11307 IEM_MC_BEGIN(3, 0);
11308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11309 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11310 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11311
11312 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11313 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11314 IEM_MC_REF_EFLAGS(pEFlags);
11315 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11316
11317 IEM_MC_ADVANCE_RIP();
11318 IEM_MC_END();
11319 return VINF_SUCCESS;
11320
11321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11322 }
11323 }
11324 else
11325 {
11326 /*
11327 * We're accessing memory.
11328 */
11329 switch (pVCpu->iem.s.enmEffOpSize)
11330 {
11331 case IEMMODE_16BIT:
11332 IEM_MC_BEGIN(3, 3);
11333 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11334 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11335 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11336 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11338
11339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11340 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11341 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11342 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11343 IEM_MC_FETCH_EFLAGS(EFlags);
11344 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11345 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11346 else
11347 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11348
11349 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
11350 IEM_MC_COMMIT_EFLAGS(EFlags);
11351 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11352 IEM_MC_ADVANCE_RIP();
11353 IEM_MC_END();
11354 return VINF_SUCCESS;
11355
11356 case IEMMODE_32BIT:
11357 IEM_MC_BEGIN(3, 3);
11358 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11359 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11360 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11361 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11363
11364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11365 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11366 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11367 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11368 IEM_MC_FETCH_EFLAGS(EFlags);
11369 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11370 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11371 else
11372 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11373
11374 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
11375 IEM_MC_COMMIT_EFLAGS(EFlags);
11376 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11377 IEM_MC_ADVANCE_RIP();
11378 IEM_MC_END();
11379 return VINF_SUCCESS;
11380
11381 case IEMMODE_64BIT:
11382 IEM_MC_BEGIN(3, 3);
11383 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11384 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11385 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11386 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11388
11389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11390 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11391 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11392 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11393 IEM_MC_FETCH_EFLAGS(EFlags);
11394 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11395 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11396 else
11397 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11398
11399 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11400 IEM_MC_COMMIT_EFLAGS(EFlags);
11401 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11402 IEM_MC_ADVANCE_RIP();
11403 IEM_MC_END();
11404 return VINF_SUCCESS;
11405
11406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11407 }
11408 }
11409}
11410
11411
11412/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11413FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11414{
11415 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11416
11417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11418 if (IEM_IS_MODRM_REG_MODE(bRm))
11419 {
11420 /*
11421 * Register, register.
11422 */
11423 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11425 IEM_MC_BEGIN(4, 2);
11426 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11427 IEM_MC_LOCAL(X86XMMREG, Dst);
11428 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11429 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11430 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11431 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11433 IEM_MC_PREPARE_SSE_USAGE();
11434 IEM_MC_REF_MXCSR(pfMxcsr);
11435 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11436 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11437 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11438 IEM_MC_IF_MXCSR_XCPT_PENDING()
11439 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11440 IEM_MC_ELSE()
11441 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11442 IEM_MC_ENDIF();
11443
11444 IEM_MC_ADVANCE_RIP();
11445 IEM_MC_END();
11446 }
11447 else
11448 {
11449 /*
11450 * Register, memory.
11451 */
11452 IEM_MC_BEGIN(4, 3);
11453 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11454 IEM_MC_LOCAL(X86XMMREG, Dst);
11455 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11456 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11457 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11459
11460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11461 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11462 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11465 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11466
11467 IEM_MC_PREPARE_SSE_USAGE();
11468 IEM_MC_REF_MXCSR(pfMxcsr);
11469 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11471 IEM_MC_IF_MXCSR_XCPT_PENDING()
11472 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11473 IEM_MC_ELSE()
11474 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11475 IEM_MC_ENDIF();
11476
11477 IEM_MC_ADVANCE_RIP();
11478 IEM_MC_END();
11479 }
11480 return VINF_SUCCESS;
11481}
11482
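/*
 * Illustrative sketch (exposition only, MXCSR and exception behaviour
 * omitted; the helper name is invented): how the low three immediate bits
 * select the CMPPS compare predicate for one lane, producing an all-ones or
 * all-zeros mask.
 */
static uint32_t iemExampleSketchCmpPsLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    bool fRes;
    switch (bImm & 7)
    {
        case 0:  fRes =   r32Src1 == r32Src2;  break;                       /* EQ */
        case 1:  fRes =   r32Src1 <  r32Src2;  break;                       /* LT */
        case 2:  fRes =   r32Src1 <= r32Src2;  break;                       /* LE */
        case 3:  fRes =   r32Src1 != r32Src1 || r32Src2 != r32Src2; break;  /* UNORD (either NaN) */
        case 4:  fRes = !(r32Src1 == r32Src2); break;                       /* NEQ */
        case 5:  fRes = !(r32Src1 <  r32Src2); break;                       /* NLT */
        case 6:  fRes = !(r32Src1 <= r32Src2); break;                       /* NLE */
        default: fRes =   r32Src1 == r32Src1 && r32Src2 == r32Src2; break;  /* ORD (neither NaN) */
    }
    return fRes ? UINT32_MAX : 0;
}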
11483
11484/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11485FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11486{
11487 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11488
11489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11490 if (IEM_IS_MODRM_REG_MODE(bRm))
11491 {
11492 /*
11493 * Register, register.
11494 */
11495 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11497 IEM_MC_BEGIN(4, 2);
11498 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11499 IEM_MC_LOCAL(X86XMMREG, Dst);
11500 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11501 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11502 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11503 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11505 IEM_MC_PREPARE_SSE_USAGE();
11506 IEM_MC_REF_MXCSR(pfMxcsr);
11507 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11508 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11509 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11510 IEM_MC_IF_MXCSR_XCPT_PENDING()
11511 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11512 IEM_MC_ELSE()
11513 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11514 IEM_MC_ENDIF();
11515
11516 IEM_MC_ADVANCE_RIP();
11517 IEM_MC_END();
11518 }
11519 else
11520 {
11521 /*
11522 * Register, memory.
11523 */
11524 IEM_MC_BEGIN(4, 3);
11525 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11526 IEM_MC_LOCAL(X86XMMREG, Dst);
11527 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11528 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11529 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11531
11532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11533 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11534 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11536 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11537 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11538
11539 IEM_MC_PREPARE_SSE_USAGE();
11540 IEM_MC_REF_MXCSR(pfMxcsr);
11541 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11542 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11543 IEM_MC_IF_MXCSR_XCPT_PENDING()
11544 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11545 IEM_MC_ELSE()
11546 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11547 IEM_MC_ENDIF();
11548
11549 IEM_MC_ADVANCE_RIP();
11550 IEM_MC_END();
11551 }
11552 return VINF_SUCCESS;
11553}
11554
11555
11556/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11557FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11558{
11559 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11560
11561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11562 if (IEM_IS_MODRM_REG_MODE(bRm))
11563 {
11564 /*
11565 * Register, register.
11566 */
11567 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11569 IEM_MC_BEGIN(4, 2);
11570 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11571 IEM_MC_LOCAL(X86XMMREG, Dst);
11572 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11573 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11574 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11575 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11576         IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11577 IEM_MC_PREPARE_SSE_USAGE();
11578 IEM_MC_REF_MXCSR(pfMxcsr);
11579 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11580 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11581 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11582 IEM_MC_IF_MXCSR_XCPT_PENDING()
11583 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11584 IEM_MC_ELSE()
11585 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11586 IEM_MC_ENDIF();
11587
11588 IEM_MC_ADVANCE_RIP();
11589 IEM_MC_END();
11590 }
11591 else
11592 {
11593 /*
11594 * Register, memory.
11595 */
11596 IEM_MC_BEGIN(4, 3);
11597 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11598 IEM_MC_LOCAL(X86XMMREG, Dst);
11599 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11600 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11601 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11603
11604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11605 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11606 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11608         IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11609 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11610
11611 IEM_MC_PREPARE_SSE_USAGE();
11612 IEM_MC_REF_MXCSR(pfMxcsr);
11613 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11614 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11615 IEM_MC_IF_MXCSR_XCPT_PENDING()
11616 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11617 IEM_MC_ELSE()
11618 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11619 IEM_MC_ENDIF();
11620
11621 IEM_MC_ADVANCE_RIP();
11622 IEM_MC_END();
11623 }
11624 return VINF_SUCCESS;
11625}
11626
11627
11628/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11629FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11630{
11631 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11632
11633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11634 if (IEM_IS_MODRM_REG_MODE(bRm))
11635 {
11636 /*
11637 * Register, register.
11638 */
11639 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11641 IEM_MC_BEGIN(4, 2);
11642 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11643 IEM_MC_LOCAL(X86XMMREG, Dst);
11644 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11645 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11646 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11647 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11648 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11649 IEM_MC_PREPARE_SSE_USAGE();
11650 IEM_MC_REF_MXCSR(pfMxcsr);
11651 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11652 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11653 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11654 IEM_MC_IF_MXCSR_XCPT_PENDING()
11655 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11656 IEM_MC_ELSE()
11657 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11658 IEM_MC_ENDIF();
11659
11660 IEM_MC_ADVANCE_RIP();
11661 IEM_MC_END();
11662 }
11663 else
11664 {
11665 /*
11666 * Register, memory.
11667 */
11668 IEM_MC_BEGIN(4, 3);
11669 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11670 IEM_MC_LOCAL(X86XMMREG, Dst);
11671 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11672 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11673 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11675
11676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11677 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11678 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11680 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11681         IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11682
11683 IEM_MC_PREPARE_SSE_USAGE();
11684 IEM_MC_REF_MXCSR(pfMxcsr);
11685 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11686 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11687 IEM_MC_IF_MXCSR_XCPT_PENDING()
11688 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11689 IEM_MC_ELSE()
11690             IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11691 IEM_MC_ENDIF();
11692
11693 IEM_MC_ADVANCE_RIP();
11694 IEM_MC_END();
11695 }
11696 return VINF_SUCCESS;
11697}
11698
11699
11700/** Opcode 0x0f 0xc3. */
11701FNIEMOP_DEF(iemOp_movnti_My_Gy)
11702{
11703 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
11704
11705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11706
11707 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
11708 if (IEM_IS_MODRM_MEM_MODE(bRm))
11709 {
11710 switch (pVCpu->iem.s.enmEffOpSize)
11711 {
11712 case IEMMODE_32BIT:
11713 IEM_MC_BEGIN(0, 2);
11714 IEM_MC_LOCAL(uint32_t, u32Value);
11715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11716
11717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11719 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11720 return IEMOP_RAISE_INVALID_OPCODE();
11721
11722 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11723 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11724 IEM_MC_ADVANCE_RIP();
11725 IEM_MC_END();
11726 break;
11727
11728 case IEMMODE_64BIT:
11729 IEM_MC_BEGIN(0, 2);
11730 IEM_MC_LOCAL(uint64_t, u64Value);
11731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11732
11733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11735 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
11736 return IEMOP_RAISE_INVALID_OPCODE();
11737
11738 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
11739 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11740 IEM_MC_ADVANCE_RIP();
11741 IEM_MC_END();
11742 break;
11743
11744 case IEMMODE_16BIT:
11745 /** @todo check this form. */
11746 return IEMOP_RAISE_INVALID_OPCODE();
11747 }
11748 }
11749 else
11750 return IEMOP_RAISE_INVALID_OPCODE();
11751 return VINF_SUCCESS;
11752}
11753
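/*
 * Note (exposition only): the non-temporal hint of MOVNTI has no semantic
 * effect in the interpreter; the IEM_MC_STORE_MEM_U32/U64 statements above
 * perform ordinary stores and the cache hint is simply dropped.
 */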
11754
11755/* Opcode 0x66 0x0f 0xc3 - invalid */
11756/* Opcode 0xf3 0x0f 0xc3 - invalid */
11757/* Opcode 0xf2 0x0f 0xc3 - invalid */
11758
11759
11760/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
11761FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
11762{
11763 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11765 if (IEM_IS_MODRM_REG_MODE(bRm))
11766 {
11767 /*
11768 * Register, register.
11769 */
11770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11772 IEM_MC_BEGIN(3, 0);
11773 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11774 IEM_MC_ARG(uint16_t, u16Src, 1);
11775 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11776 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11777 IEM_MC_PREPARE_FPU_USAGE();
11778 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11779 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11780 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11781 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11782 IEM_MC_FPU_TO_MMX_MODE();
11783 IEM_MC_ADVANCE_RIP();
11784 IEM_MC_END();
11785 }
11786 else
11787 {
11788 /*
11789 * Register, memory.
11790 */
11791 IEM_MC_BEGIN(3, 2);
11792 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11793 IEM_MC_ARG(uint16_t, u16Src, 1);
11794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11795
11796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11800 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11801 IEM_MC_PREPARE_FPU_USAGE();
11802
11803 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11804 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11805 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
11806 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
11807 IEM_MC_FPU_TO_MMX_MODE();
11808 IEM_MC_ADVANCE_RIP();
11809 IEM_MC_END();
11810 }
11811 return VINF_SUCCESS;
11812}
11813
11814
11815/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
11816FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
11817{
11818 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11820 if (IEM_IS_MODRM_REG_MODE(bRm))
11821 {
11822 /*
11823 * Register, register.
11824 */
11825 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11827 IEM_MC_BEGIN(3, 0);
11828 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11829 IEM_MC_ARG(uint16_t, u16Src, 1);
11830 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11831 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11832 IEM_MC_PREPARE_SSE_USAGE();
11833 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11834 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11835 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11836 IEM_MC_ADVANCE_RIP();
11837 IEM_MC_END();
11838 }
11839 else
11840 {
11841 /*
11842 * Register, memory.
11843 */
11844 IEM_MC_BEGIN(3, 2);
11845 IEM_MC_ARG(PRTUINT128U, puDst, 0);
11846 IEM_MC_ARG(uint16_t, u16Src, 1);
11847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11848
11849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11854 IEM_MC_PREPARE_SSE_USAGE();
11855
11856 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11857 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11858 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
11859 IEM_MC_ADVANCE_RIP();
11860 IEM_MC_END();
11861 }
11862 return VINF_SUCCESS;
11863}
11864
11865
11866/* Opcode 0xf3 0x0f 0xc4 - invalid */
11867/* Opcode 0xf2 0x0f 0xc4 - invalid */
11868
11869
11870/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
11871FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
11872{
11873    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);*/ /** @todo */
11874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11875 if (IEM_IS_MODRM_REG_MODE(bRm))
11876 {
11877 /*
11878 * Register, register.
11879 */
11880 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11882 IEM_MC_BEGIN(3, 1);
11883 IEM_MC_LOCAL(uint16_t, u16Dst);
11884 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11885 IEM_MC_ARG(uint64_t, u64Src, 1);
11886 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11887 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
11888 IEM_MC_PREPARE_FPU_USAGE();
11889 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11890 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bEvilArg);
11891 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11892 IEM_MC_FPU_TO_MMX_MODE();
11893 IEM_MC_ADVANCE_RIP();
11894 IEM_MC_END();
11895 return VINF_SUCCESS;
11896 }
11897
11898 /* No memory operand. */
11899 return IEMOP_RAISE_INVALID_OPCODE();
11900}
11901
11902
11903/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
11904FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
11905{
11906 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11908 if (IEM_IS_MODRM_REG_MODE(bRm))
11909 {
11910 /*
11911 * Register, register.
11912 */
11913 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11915 IEM_MC_BEGIN(3, 1);
11916 IEM_MC_LOCAL(uint16_t, u16Dst);
11917 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
11918 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
11919 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11921 IEM_MC_PREPARE_SSE_USAGE();
11922 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11923 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bEvilArg);
11924 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
11925 IEM_MC_ADVANCE_RIP();
11926 IEM_MC_END();
11927 return VINF_SUCCESS;
11928 }
11929
11930 /* No memory operand. */
11931 return IEMOP_RAISE_INVALID_OPCODE();
11932}
11933
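/*
 * Illustrative sketch (exposition only; the helper names are invented):
 * PINSRW and PEXTRW simply address 16-bit lanes, with the immediate masked
 * to the register width (& 3 for the 64-bit MMX forms, & 7 for the 128-bit
 * SSE forms above), and PEXTRW zero extending into the 32-bit GPR.
 */
static void iemExampleSketchPinsrwU128(uint16_t *pau16Dst /* 8 lanes */, uint16_t u16Src, uint8_t bImm)
{
    pau16Dst[bImm & 7] = u16Src;            /* insert into the selected lane only */
}

static uint32_t iemExampleSketchPextrwU128(uint16_t const *pau16Src /* 8 lanes */, uint8_t bImm)
{
    return pau16Src[bImm & 7];              /* zero extended on return */
}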
11934
11935/* Opcode 0xf3 0x0f 0xc5 - invalid */
11936/* Opcode 0xf2 0x0f 0xc5 - invalid */
11937
11938
11939/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
11940FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
11941{
11942 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11944 if (IEM_IS_MODRM_REG_MODE(bRm))
11945 {
11946 /*
11947 * Register, register.
11948 */
11949 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11951 IEM_MC_BEGIN(3, 0);
11952 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11953 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
11954 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11955 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11956 IEM_MC_PREPARE_SSE_USAGE();
11957 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11958 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
11959 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11960 IEM_MC_ADVANCE_RIP();
11961 IEM_MC_END();
11962 }
11963 else
11964 {
11965 /*
11966 * Register, memory.
11967 */
11968 IEM_MC_BEGIN(3, 2);
11969 IEM_MC_ARG(PRTUINT128U, pDst, 0);
11970 IEM_MC_LOCAL(RTUINT128U, uSrc);
11971 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
11972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11973
11974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11975 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
11976 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
11977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11978 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11979 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11980
11981 IEM_MC_PREPARE_SSE_USAGE();
11982 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
11983 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
11984
11985 IEM_MC_ADVANCE_RIP();
11986 IEM_MC_END();
11987 }
11988 return VINF_SUCCESS;
11989}
11990
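/*
 * Illustrative sketch (exposition only; the helper name is invented): the
 * SHUFPS immediate is four 2-bit lane selectors; the two low result lanes
 * pick from the destination, the two high ones from the source.
 */
static void iemExampleSketchShufPs(uint32_t *pau32Dst /* 4 lanes */, uint32_t const *pau32Src, uint8_t bImm)
{
    uint32_t const au32Old[4] = { pau32Dst[0], pau32Dst[1], pau32Dst[2], pau32Dst[3] };
    pau32Dst[0] = au32Old[ bImm       & 3];
    pau32Dst[1] = au32Old[(bImm >> 2) & 3];
    pau32Dst[2] = pau32Src[(bImm >> 4) & 3];
    pau32Dst[3] = pau32Src[(bImm >> 6) & 3];
}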
11991
11992/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
11993FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
11994{
11995 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11997 if (IEM_IS_MODRM_REG_MODE(bRm))
11998 {
11999 /*
12000 * Register, register.
12001 */
12002 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12004 IEM_MC_BEGIN(3, 0);
12005 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12006 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12007 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12008 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12009 IEM_MC_PREPARE_SSE_USAGE();
12010 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12011 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12012 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12013 IEM_MC_ADVANCE_RIP();
12014 IEM_MC_END();
12015 }
12016 else
12017 {
12018 /*
12019 * Register, memory.
12020 */
12021 IEM_MC_BEGIN(3, 2);
12022 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12023 IEM_MC_LOCAL(RTUINT128U, uSrc);
12024 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12026
12027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12028 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
12029 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
12030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12031 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12033
12034 IEM_MC_PREPARE_SSE_USAGE();
12035 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12036 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
12037
12038 IEM_MC_ADVANCE_RIP();
12039 IEM_MC_END();
12040 }
12041 return VINF_SUCCESS;
12042}
12043
12044
12045/* Opcode 0xf3 0x0f 0xc6 - invalid */
12046/* Opcode 0xf2 0x0f 0xc6 - invalid */
12047
12048
12049/** Opcode 0x0f 0xc7 !11/1. */
12050FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12051{
12052 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12053
12054 IEM_MC_BEGIN(4, 3);
12055 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12056 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12057 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12058 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12059 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12060 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12062
12063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12064 IEMOP_HLP_DONE_DECODING();
12065 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12066
12067 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12068 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12069 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12070
12071 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12072 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12073 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12074
12075 IEM_MC_FETCH_EFLAGS(EFlags);
12076 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12077 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12078 else
12079 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12080
12081 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
12082 IEM_MC_COMMIT_EFLAGS(EFlags);
12083 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12084 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
12085 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12086 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12087 IEM_MC_ENDIF();
12088 IEM_MC_ADVANCE_RIP();
12089
12090 IEM_MC_END();
12091 return VINF_SUCCESS;
12092}
12093
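/*
 * Illustrative sketch (exposition only, locking and flags other than ZF
 * omitted; the helper name is invented): the compare-and-exchange the
 * cmpxchg8b workers above perform, with EDX:EAX as the comparand and
 * ECX:EBX as the replacement value.
 */
static void iemExampleSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EcxEbx, bool *pfZf)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EcxEbx;               /* match: store ECX:EBX and set ZF */
        *pfZf = true;
    }
    else
    {
        *pu64EaxEdx = *pu64Mem;             /* mismatch: old value goes to EDX:EAX, ZF cleared */
        *pfZf = false;
    }
}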
12094
12095/** Opcode REX.W 0x0f 0xc7 !11/1. */
12096FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12097{
12098 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12099 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12100 {
12101#if 0
12102 RT_NOREF(bRm);
12103 IEMOP_BITCH_ABOUT_STUB();
12104 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
12105#else
12106 IEM_MC_BEGIN(4, 3);
12107 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12108 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12109 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12110 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12111 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12112 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12114
12115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12116 IEMOP_HLP_DONE_DECODING();
12117 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12118 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12119
12120 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12121 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12122 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12123
12124 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12125 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12126 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12127
12128 IEM_MC_FETCH_EFLAGS(EFlags);
12129# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
12130# if defined(RT_ARCH_AMD64)
12131 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
12132# endif
12133 {
12134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12136 else
12137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12138 }
12139# if defined(RT_ARCH_AMD64)
12140 else
12141# endif
12142# endif
12143# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
12144 {
12145 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12146             accesses and not at all atomic, which works fine in a UNI CPU guest
12147 configuration (ignoring DMA). If guest SMP is active we have no choice
12148 but to use a rendezvous callback here. Sigh. */
12149 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12150 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12151 else
12152 {
12153 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12154 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12155 }
12156 }
12157# endif
12158
12159 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12160 IEM_MC_COMMIT_EFLAGS(EFlags);
12161 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
12162 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12163 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12164 IEM_MC_ENDIF();
12165 IEM_MC_ADVANCE_RIP();
12166
12167 IEM_MC_END();
12168 return VINF_SUCCESS;
12169#endif
12170 }
12171 Log(("cmpxchg16b -> #UD\n"));
12172 return IEMOP_RAISE_INVALID_OPCODE();
12173}
12174
12175FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12176{
12177 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12178 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12179 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12180}
12181
12182/** Opcode 0x0f 0xc7 11/6. */
12183FNIEMOP_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
12184
12185/** Opcode 0x0f 0xc7 !11/6. */
12186#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12187FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12188{
12189 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12190 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12191 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12192 IEM_MC_BEGIN(2, 0);
12193 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12194 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12196 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12197 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12198 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12199 IEM_MC_END();
12200 return VINF_SUCCESS;
12201}
12202#else
12203FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12204#endif
12205
12206/** Opcode 0x66 0x0f 0xc7 !11/6. */
12207#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12208FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12209{
12210 IEMOP_MNEMONIC(vmclear, "vmclear");
12211 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12212 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12213 IEM_MC_BEGIN(2, 0);
12214 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12215 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12217 IEMOP_HLP_DONE_DECODING();
12218 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12219 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12220 IEM_MC_END();
12221 return VINF_SUCCESS;
12222}
12223#else
12224FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12225#endif
12226
12227/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12228#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12229FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12230{
12231 IEMOP_MNEMONIC(vmxon, "vmxon");
12232 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12233 IEM_MC_BEGIN(2, 0);
12234 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12235 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12237 IEMOP_HLP_DONE_DECODING();
12238 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12239 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12240 IEM_MC_END();
12241 return VINF_SUCCESS;
12242}
12243#else
12244FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12245#endif
12246
12247/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12248#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12249FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12250{
12251 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12252 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12253 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12254 IEM_MC_BEGIN(2, 0);
12255 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12256 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12258 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12259 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12260 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12261 IEM_MC_END();
12262 return VINF_SUCCESS;
12263}
12264#else
12265FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12266#endif
12267
12268/** Opcode 0x0f 0xc7 11/7. */
12269FNIEMOP_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
12270
12271
12272/**
12273 * Group 9 jump table for register variant.
12274 */
12275IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12276{ /* pfx: none, 066h, 0f3h, 0f2h */
12277 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12278 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12279 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12280 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12281 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12282 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12283 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12284 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12285};
12286AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12287
12288
12289/**
12290 * Group 9 jump table for memory variant.
12291 */
12292IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12293{ /* pfx: none, 066h, 0f3h, 0f2h */
12294 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12295 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12296 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12297 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12298 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12299 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12300 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12301 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12302};
12303AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12304
12305
12306/** Opcode 0x0f 0xc7. */
12307FNIEMOP_DEF(iemOp_Grp9)
12308{
12309 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12310 if (IEM_IS_MODRM_REG_MODE(bRm))
12311 /* register, register */
12312 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12313 + pVCpu->iem.s.idxPrefix], bRm);
12314 /* memory, register */
12315 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12316 + pVCpu->iem.s.idxPrefix], bRm);
12317}
12318
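/*
 * Note (exposition only, restating the dispatch above): both group 9 tables
 * are laid out as [/r * 4 + prefix], i.e. each ModR/M reg row holds its four
 * prefix variants (none, 066h, 0f3h, 0f2h) back to back:
 *
 *      idx = IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix;
 */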
12319
12320/**
12321 * Common 'bswap register' helper.
12322 */
12323FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12324{
12325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12326 switch (pVCpu->iem.s.enmEffOpSize)
12327 {
12328 case IEMMODE_16BIT:
12329 IEM_MC_BEGIN(1, 0);
12330 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12331 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12332 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12333 IEM_MC_ADVANCE_RIP();
12334 IEM_MC_END();
12335 return VINF_SUCCESS;
12336
12337 case IEMMODE_32BIT:
12338 IEM_MC_BEGIN(1, 0);
12339 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12340 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12341 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12342 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12343 IEM_MC_ADVANCE_RIP();
12344 IEM_MC_END();
12345 return VINF_SUCCESS;
12346
12347 case IEMMODE_64BIT:
12348 IEM_MC_BEGIN(1, 0);
12349 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12350 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12351 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12352 IEM_MC_ADVANCE_RIP();
12353 IEM_MC_END();
12354 return VINF_SUCCESS;
12355
12356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12357 }
12358}
12359
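/*
 * Illustrative sketch (exposition only; the helper name is invented): the
 * 32-bit byte swap the common helper above performs.  The 16-bit encoding is
 * architecturally undefined, which is why the helper deliberately hands the
 * u16 worker a 32-bit register reference without clearing the high dword.
 */
static uint32_t iemExampleSketchBswapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}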
12360
12361/** Opcode 0x0f 0xc8. */
12362FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12363{
12364 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12365     /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12366              prefix; REX.B appears to be the correct prefix, however. For a parallel
12367 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12368 IEMOP_HLP_MIN_486();
12369 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12370}
12371
12372
12373/** Opcode 0x0f 0xc9. */
12374FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12375{
12376 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12377 IEMOP_HLP_MIN_486();
12378 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12379}
12380
12381
12382/** Opcode 0x0f 0xca. */
12383FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12384{
12385 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12386 IEMOP_HLP_MIN_486();
12387 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12388}
12389
12390
12391/** Opcode 0x0f 0xcb. */
12392FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12393{
12394 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12395 IEMOP_HLP_MIN_486();
12396 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12397}
12398
12399
12400/** Opcode 0x0f 0xcc. */
12401FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12402{
12403 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12404 IEMOP_HLP_MIN_486();
12405 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12406}
12407
12408
12409/** Opcode 0x0f 0xcd. */
12410FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12411{
12412 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12413 IEMOP_HLP_MIN_486();
12414 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12415}
12416
12417
12418/** Opcode 0x0f 0xce. */
12419FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12420{
12421 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12422 IEMOP_HLP_MIN_486();
12423 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12424}
12425
12426
12427/** Opcode 0x0f 0xcf. */
12428FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12429{
12430 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12431 IEMOP_HLP_MIN_486();
12432 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12433}
12434
12435
12436/* Opcode 0x0f 0xd0 - invalid */
12437
12438
12439/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12440FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12441{
12442 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
12443 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12444}
12445
12446
12447/* Opcode 0xf3 0x0f 0xd0 - invalid */
12448
12449
12450/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12451FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12452{
12453 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
12454 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12455}
12456
12457
12458
12459/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12460FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12461{
12462 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12463 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12464}
12465
12466/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12467FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12468{
12469 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12470 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12471}
12472
12473/* Opcode 0xf3 0x0f 0xd1 - invalid */
12474/* Opcode 0xf2 0x0f 0xd1 - invalid */
12475
12476/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12477FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12478{
12479 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
12480 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12481}
12482
12483
12484/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12485FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12486{
12487 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12488 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12489}
12490
12491
12492/* Opcode 0xf3 0x0f 0xd2 - invalid */
12493/* Opcode 0xf2 0x0f 0xd2 - invalid */
12494
12495/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12496FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12497{
12498 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12499 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12500}
12501
12502
12503/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12504FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12505{
12506 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
12507 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12508}
12509
12510
12511/* Opcode 0xf3 0x0f 0xd3 - invalid */
12512/* Opcode 0xf2 0x0f 0xd3 - invalid */
12513
12514
12515/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12516FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12517{
12518 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12519 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
12520}
12521
12522
12523/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12524FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12525{
12526 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12527 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12528}
12529
12530
12531/* Opcode 0xf3 0x0f 0xd4 - invalid */
12532/* Opcode 0xf2 0x0f 0xd4 - invalid */
12533
12534/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12535FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12536{
12537 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12538 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12539}
12540
12541/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12542FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12543{
12544 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12545 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12546}
12547
12548
12549/* Opcode 0xf3 0x0f 0xd5 - invalid */
12550/* Opcode 0xf2 0x0f 0xd5 - invalid */
12551
12552/* Opcode 0x0f 0xd6 - invalid */
12553
12554/**
12555 * @opcode 0xd6
12556 * @oppfx 0x66
12557 * @opcpuid sse2
12558 * @opgroup og_sse2_pcksclr_datamove
12559 * @opxcpttype none
12560 * @optest op1=-1 op2=2 -> op1=2
12561 * @optest op1=0 op2=-42 -> op1=-42
12562 */
12563FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12564{
12565 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12567 if (IEM_IS_MODRM_REG_MODE(bRm))
12568 {
12569 /*
12570 * Register, register.
12571 */
12572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12573 IEM_MC_BEGIN(0, 2);
12574 IEM_MC_LOCAL(uint64_t, uSrc);
12575
12576 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12578
12579 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12580 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12581
12582 IEM_MC_ADVANCE_RIP();
12583 IEM_MC_END();
12584 }
12585 else
12586 {
12587 /*
12588 * Memory, register.
12589 */
12590 IEM_MC_BEGIN(0, 2);
12591 IEM_MC_LOCAL(uint64_t, uSrc);
12592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12593
12594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12596 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12597 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12598
12599 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
12600 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12601
12602 IEM_MC_ADVANCE_RIP();
12603 IEM_MC_END();
12604 }
12605 return VINF_SUCCESS;
12606}
12607
12608
12609/**
12610 * @opcode 0xd6
12611 * @opcodesub 11 mr/reg
12612 * @oppfx f3
12613 * @opcpuid sse2
12614 * @opgroup og_sse2_simdint_datamove
12615 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12616 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12617 */
12618FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12619{
12620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12621 if (IEM_IS_MODRM_REG_MODE(bRm))
12622 {
12623 /*
12624 * Register, register.
12625 */
12626 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12628 IEM_MC_BEGIN(0, 1);
12629 IEM_MC_LOCAL(uint64_t, uSrc);
12630
12631 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12632 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12633 IEM_MC_FPU_TO_MMX_MODE();
12634
12635 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
12636 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
12637
12638 IEM_MC_ADVANCE_RIP();
12639 IEM_MC_END();
12640 return VINF_SUCCESS;
12641 }
12642
12643 /**
12644 * @opdone
12645 * @opmnemonic udf30fd6mem
12646 * @opcode 0xd6
12647 * @opcodesub !11 mr/reg
12648 * @oppfx f3
12649 * @opunused intel-modrm
12650 * @opcpuid sse
12651 * @optest ->
12652 */
12653 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12654}
12655
12656
12657/**
12658 * @opcode 0xd6
12659 * @opcodesub 11 mr/reg
12660 * @oppfx f2
12661 * @opcpuid sse2
12662 * @opgroup og_sse2_simdint_datamove
12663 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12664 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12665 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
12666 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
12667 * @optest op1=-42 op2=0xfedcba9876543210
12668 * -> op1=0xfedcba9876543210 ftw=0xff
12669 */
12670FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
12671{
12672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12673 if (IEM_IS_MODRM_REG_MODE(bRm))
12674 {
12675 /*
12676 * Register, register.
12677 */
12678 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12680 IEM_MC_BEGIN(0, 1);
12681 IEM_MC_LOCAL(uint64_t, uSrc);
12682
12683 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12684 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12685 IEM_MC_FPU_TO_MMX_MODE();
12686
12687 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12688 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
12689
12690 IEM_MC_ADVANCE_RIP();
12691 IEM_MC_END();
12692 return VINF_SUCCESS;
12693 }
12694
12695 /**
12696 * @opdone
12697 * @opmnemonic udf20fd6mem
12698 * @opcode 0xd6
12699 * @opcodesub !11 mr/reg
12700 * @oppfx f2
12701 * @opunused intel-modrm
12702 * @opcpuid sse
12703 * @optest ->
12704 */
12705 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
12706}
12707
12708
12709/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
12710FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
12711{
12712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12713 /* Docs say register only. */
12714 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12715 {
12716 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12717 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
12718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12719 IEM_MC_BEGIN(2, 0);
12720 IEM_MC_ARG(uint64_t *, puDst, 0);
12721 IEM_MC_ARG(uint64_t const *, puSrc, 1);
12722 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
12723 IEM_MC_PREPARE_FPU_USAGE();
12724 IEM_MC_FPU_TO_MMX_MODE();
12725
12726 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
12727 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
12728 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
12729
12730 IEM_MC_ADVANCE_RIP();
12731 IEM_MC_END();
12732 return VINF_SUCCESS;
12733 }
12734 return IEMOP_RAISE_INVALID_OPCODE();
12735}
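
/* Reference sketch of the 64-bit pmovmskb operation dispatched above: the
   sign bit of each of the eight source bytes is packed into bits 0..7 of
   the destination and the remaining bits are cleared. The actual
   iemAImpl_pmovmskb_u64 worker is assumed to match this. */
static void iemAImplSketch_pmovmskb_u64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uResult |= ((*puSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *puDst = uResult;
}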
12736
12737
12738/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
12739FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
12740{
12741 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12742 /* Docs say register only. */
12743 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
12744 {
12745 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
12746 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
12747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12748 IEM_MC_BEGIN(2, 0);
12749 IEM_MC_ARG(uint64_t *, puDst, 0);
12750 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
12752 IEM_MC_PREPARE_SSE_USAGE();
12753 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12754 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12755 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
12756 IEM_MC_ADVANCE_RIP();
12757 IEM_MC_END();
12758 return VINF_SUCCESS;
12759 }
12760 return IEMOP_RAISE_INVALID_OPCODE();
12761}
12762
12763
12764/* Opcode 0xf3 0x0f 0xd7 - invalid */
12765/* Opcode 0xf2 0x0f 0xd7 - invalid */
12766
12767
12768/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
12769FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
12770{
12771 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12772 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
12773}
12774
12775
12776/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
12777FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
12778{
12779 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12780 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
12781}
12782
12783
12784/* Opcode 0xf3 0x0f 0xd8 - invalid */
12785/* Opcode 0xf2 0x0f 0xd8 - invalid */
12786
12787/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
12788FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
12789{
12790 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12791 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
12792}
12793
12794
12795/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
12796FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
12797{
12798 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12799 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
12800}
12801
12802
12803/* Opcode 0xf3 0x0f 0xd9 - invalid */
12804/* Opcode 0xf2 0x0f 0xd9 - invalid */
12805
12806/** Opcode 0x0f 0xda - pminub Pq, Qq */
12807FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
12808{
12809 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12810 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
12811}
12812
12813
12814/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
12815FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
12816{
12817 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12818 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
12819}
12820
12821/* Opcode 0xf3 0x0f 0xda - invalid */
12822/* Opcode 0xf2 0x0f 0xda - invalid */
12823
12824/** Opcode 0x0f 0xdb - pand Pq, Qq */
12825FNIEMOP_DEF(iemOp_pand_Pq_Qq)
12826{
12827 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12828 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
12829}
12830
12831
12832/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
12833FNIEMOP_DEF(iemOp_pand_Vx_Wx)
12834{
12835 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12836 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
12837}
12838
12839
12840/* Opcode 0xf3 0x0f 0xdb - invalid */
12841/* Opcode 0xf2 0x0f 0xdb - invalid */
12842
12843/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
12844FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
12845{
12846 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12847 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
12848}
12849
12850
12851/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
12852FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
12853{
12854 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12855 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
12856}
12857
12858
12859/* Opcode 0xf3 0x0f 0xdc - invalid */
12860/* Opcode 0xf2 0x0f 0xdc - invalid */
12861
12862/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
12863FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
12864{
12865 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12866 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
12867}
12868
12869
12870/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
12871FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
12872{
12873 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12874 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
12875}
12876
12877
12878/* Opcode 0xf3 0x0f 0xdd - invalid */
12879/* Opcode 0xf2 0x0f 0xdd - invalid */
12880
12881/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
12882FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
12883{
12884 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12885 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
12886}
12887
12888
12889/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
12890FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
12891{
12892 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12893 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
12894}
12895
12896/* Opcode 0xf3 0x0f 0xde - invalid */
12897/* Opcode 0xf2 0x0f 0xde - invalid */
12898
12899
12900/** Opcode 0x0f 0xdf - pandn Pq, Qq */
12901FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
12902{
12903 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12904 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
12905}
12906
12907
12908/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
12909FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
12910{
12911 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12912 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
12913}
12914
12915
12916/* Opcode 0xf3 0x0f 0xdf - invalid */
12917/* Opcode 0xf2 0x0f 0xdf - invalid */
12918
12919/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
12920FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
12921{
12922 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12923 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
12924}
12925
12926
12927/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
12928FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
12929{
12930 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12931 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
12932}
12933
12934
12935/* Opcode 0xf3 0x0f 0xe0 - invalid */
12936/* Opcode 0xf2 0x0f 0xe0 - invalid */
12937
12938/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
12939FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
12940{
12941 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12942 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
12943}
12944
12945
12946/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
12947FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
12948{
12949 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12950 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
12951}
12952
12953
12954/* Opcode 0xf3 0x0f 0xe1 - invalid */
12955/* Opcode 0xf2 0x0f 0xe1 - invalid */
12956
12957/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
12958FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
12959{
12960 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12961 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
12962}
12963
12964
12965/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
12966FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
12967{
12968 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12969 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
12970}
12971
12972
12973/* Opcode 0xf3 0x0f 0xe2 - invalid */
12974/* Opcode 0xf2 0x0f 0xe2 - invalid */
12975
12976/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
12977FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
12978{
12979 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12980 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
12981}
12982
12983
12984/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
12985FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
12986{
12987 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12988 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
12989}
12990
12991
12992/* Opcode 0xf3 0x0f 0xe3 - invalid */
12993/* Opcode 0xf2 0x0f 0xe3 - invalid */
12994
12995/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
12996FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
12997{
12998 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12999 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13000}
13001
13002
13003/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13004FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13005{
13006 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13007 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13008}
13009
13010
13011/* Opcode 0xf3 0x0f 0xe4 - invalid */
13012/* Opcode 0xf2 0x0f 0xe4 - invalid */
13013
13014/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13015FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13016{
13017 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13018 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13019}
13020
13021
13022/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13023FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13024{
13025 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13026 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13027}
13028
13029
13030/* Opcode 0xf3 0x0f 0xe5 - invalid */
13031/* Opcode 0xf2 0x0f 0xe5 - invalid */
13032/* Opcode 0x0f 0xe6 - invalid */
13033
13034
13035/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13036FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13037{
13038 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13039 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13040}
13041
13042
13043/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13044FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13045{
13046 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13047 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13048}
13049
13050
13051/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13052FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13053{
13054 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13055 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13056}
13057
13058
13059/**
13060 * @opcode 0xe7
13061 * @opcodesub !11 mr/reg
13062 * @oppfx none
13063 * @opcpuid sse
13064 * @opgroup og_sse1_cachect
13065 * @opxcpttype none
13066 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13067 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13068 */
13069FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13070{
13071 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13073 if (IEM_IS_MODRM_MEM_MODE(bRm))
13074 {
13075 /* Register, memory. */
13076 IEM_MC_BEGIN(0, 2);
13077 IEM_MC_LOCAL(uint64_t, uSrc);
13078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13079
13080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13082 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13083 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13084 IEM_MC_FPU_TO_MMX_MODE();
13085
13086 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13087 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13088
13089 IEM_MC_ADVANCE_RIP();
13090 IEM_MC_END();
13091 return VINF_SUCCESS;
13092 }
13093 /**
13094 * @opdone
13095 * @opmnemonic ud0fe7reg
13096 * @opcode 0xe7
13097 * @opcodesub 11 mr/reg
13098 * @oppfx none
13099 * @opunused immediate
13100 * @opcpuid sse
13101 * @optest ->
13102 */
13103 return IEMOP_RAISE_INVALID_OPCODE();
13104}
13105
13106/**
13107 * @opcode 0xe7
13108 * @opcodesub !11 mr/reg
13109 * @oppfx 0x66
13110 * @opcpuid sse2
13111 * @opgroup og_sse2_cachect
13112 * @opxcpttype 1
13113 * @optest op1=-1 op2=2 -> op1=2
13114 * @optest op1=0 op2=-42 -> op1=-42
13115 */
13116FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13117{
13118 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13120 if (IEM_IS_MODRM_MEM_MODE(bRm))
13121 {
13122 /* Register, memory. */
13123 IEM_MC_BEGIN(0, 2);
13124 IEM_MC_LOCAL(RTUINT128U, uSrc);
13125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13126
13127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13129 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
13130 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13131
13132 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13133 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13134
13135 IEM_MC_ADVANCE_RIP();
13136 IEM_MC_END();
13137 return VINF_SUCCESS;
13138 }
13139
13140 /**
13141 * @opdone
13142 * @opmnemonic ud660fe7reg
13143 * @opcode 0xe7
13144 * @opcodesub 11 mr/reg
13145 * @oppfx 0x66
13146 * @opunused immediate
13147 * @opcpuid sse
13148 * @optest ->
13149 */
13150 return IEMOP_RAISE_INVALID_OPCODE();
13151}
13152
13153/* Opcode 0xf3 0x0f 0xe7 - invalid */
13154/* Opcode 0xf2 0x0f 0xe7 - invalid */
13155
13156
13157/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13158FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13159{
13160 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13161 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13162}
13163
13164
13165/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13166FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13167{
13168 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13169 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13170}
13171
13172
13173/* Opcode 0xf3 0x0f 0xe8 - invalid */
13174/* Opcode 0xf2 0x0f 0xe8 - invalid */
13175
13176/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13177FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13178{
13179 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13180 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13181}
13182
13183
13184/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13185FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13186{
13187 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13188 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13189}
13190
13191
13192/* Opcode 0xf3 0x0f 0xe9 - invalid */
13193/* Opcode 0xf2 0x0f 0xe9 - invalid */
13194
13195
13196/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13197FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13198{
13199 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13200 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13201}
13202
13203
13204/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13205FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13206{
13207 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13208 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13209}
13210
13211
13212/* Opcode 0xf3 0x0f 0xea - invalid */
13213/* Opcode 0xf2 0x0f 0xea - invalid */
13214
13215
13216/** Opcode 0x0f 0xeb - por Pq, Qq */
13217FNIEMOP_DEF(iemOp_por_Pq_Qq)
13218{
13219 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13220 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13221}
13222
13223
13224/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13225FNIEMOP_DEF(iemOp_por_Vx_Wx)
13226{
13227 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13228 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13229}
13230
13231
13232/* Opcode 0xf3 0x0f 0xeb - invalid */
13233/* Opcode 0xf2 0x0f 0xeb - invalid */
13234
13235/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13236FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13237{
13238 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13239 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13240}
13241
13242
13243/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13244FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13245{
13246 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13247 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13248}
13249
13250
13251/* Opcode 0xf3 0x0f 0xec - invalid */
13252/* Opcode 0xf2 0x0f 0xec - invalid */
13253
13254/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13255FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13256{
13257 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13258 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13259}
13260
13261
13262/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13263FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13264{
13265 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13266 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13267}
13268
13269
13270/* Opcode 0xf3 0x0f 0xed - invalid */
13271/* Opcode 0xf2 0x0f 0xed - invalid */
13272
13273
13274/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13275FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13276{
13277 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13278 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13279}
13280
13281
13282/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13283FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13284{
13285 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13286 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13287}
13288
13289
13290/* Opcode 0xf3 0x0f 0xee - invalid */
13291/* Opcode 0xf2 0x0f 0xee - invalid */
13292
13293
13294/** Opcode 0x0f 0xef - pxor Pq, Qq */
13295FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13296{
13297 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13298 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13299}
13300
13301
13302/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13303FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13304{
13305 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13306 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13307}
13308
13309
13310/* Opcode 0xf3 0x0f 0xef - invalid */
13311/* Opcode 0xf2 0x0f 0xef - invalid */
13312
13313/* Opcode 0x0f 0xf0 - invalid */
13314/* Opcode 0x66 0x0f 0xf0 - invalid */
13315
13316
13317/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13318FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13319{
13320 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13322 if (IEM_IS_MODRM_REG_MODE(bRm))
13323 {
13324 /*
13325 * Register, register - (not implemented, assuming it raises \#UD).
13326 */
13327 return IEMOP_RAISE_INVALID_OPCODE();
13328 }
13329 else
13330 {
13331 /*
13332 * Register, memory.
13333 */
13334 IEM_MC_BEGIN(0, 2);
13335 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13337
13338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13340 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
13341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13342 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13343 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13344
13345 IEM_MC_ADVANCE_RIP();
13346 IEM_MC_END();
13347 }
13348 return VINF_SUCCESS;
13349}
13350
13351
13352/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13353FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13354{
13355 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13356 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13357}
13358
13359
13360/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13361FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13362{
13363 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13364 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13365}
13366
13367
13368/* Opcode 0xf2 0x0f 0xf1 - invalid */
13369
13370/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13371FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13372{
13373 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13374 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13375}
13376
13377
13378/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13379FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13380{
13381 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13382 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13383}
13384
13385
13386/* Opcode 0xf2 0x0f 0xf2 - invalid */
13387
13388/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13389FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13390{
13391 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13392 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13393}
13394
13395
13396/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13397FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13398{
13399 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13400 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13401}
13402
13403/* Opcode 0xf2 0x0f 0xf3 - invalid */
13404
13405/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13406FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13407{
13408 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13409 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13410}
13411
13412
13413/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13414FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13415{
13416 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13417 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13418}
13419
13420
13421/* Opcode 0xf2 0x0f 0xf4 - invalid */
13422
13423/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13424FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13425{
13426 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
13427 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13428}
13429
13430
13431/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13432FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13433{
13434 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
13435 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13436}
13437
13438/* Opcode 0xf2 0x0f 0xf5 - invalid */
13439
13440/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13441FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13442{
13443 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13444 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13445}
13446
13447
13448/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13449FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13450{
13451 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13452 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13453}
13454
13455
13456/* Opcode 0xf2 0x0f 0xf6 - invalid */
13457
13458/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13459FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13460/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13461FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
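
/* Both handlers above are still stubs. For reference, a plain C sketch of
   the byte-masked store the 64-bit maskmovq form performs: each source byte
   whose corresponding mask byte has its sign bit set is written out, the
   others are skipped. The ds:rDI addressing and the non-temporal store hint
   of the real instruction are omitted in this sketch. */
static void iemAImplSketch_maskmovq(uint8_t *pbDst, uint64_t uSrc, uint64_t uMsk)
{
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if ((uMsk >> (iByte * 8 + 7)) & 1)
            pbDst[iByte] = (uint8_t)(uSrc >> (iByte * 8));
}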
13462/* Opcode 0xf2 0x0f 0xf7 - invalid */
13463
13464
13465/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13466FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13467{
13468 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13469 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13470}
13471
13472
13473/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13474FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13475{
13476 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13477 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13478}
13479
13480
13481/* Opcode 0xf2 0x0f 0xf8 - invalid */
13482
13483
13484/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13485FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13486{
13487 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13488 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13489}
13490
13491
13492/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13493FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13494{
13495 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13496 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13497}
13498
13499
13500/* Opcode 0xf2 0x0f 0xf9 - invalid */
13501
13502
13503/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13504FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13505{
13506 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13507 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13508}
13509
13510
13511/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13512FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13513{
13514 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13515 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13516}
13517
13518
13519/* Opcode 0xf2 0x0f 0xfa - invalid */
13520
13521
13522/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13523FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13524{
13525 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13526 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
13527}
13528
13529
13530/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13531FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13532{
13533 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13534 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13535}
13536
13537
13538/* Opcode 0xf2 0x0f 0xfb - invalid */
13539
13540
13541/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13542FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13543{
13544 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13545 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13546}
13547
13548
13549/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13550FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13551{
13552 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13553 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13554}
13555
13556
13557/* Opcode 0xf2 0x0f 0xfc - invalid */
13558
13559
13560/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13561FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13562{
13563 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13564 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13565}
13566
13567
13568/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13569FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13570{
13571 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13572 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13573}
13574
13575
13576/* Opcode 0xf2 0x0f 0xfd - invalid */
13577
13578
13579/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13580FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13581{
13582 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13583 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13584}
13585
13586
13587/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13588FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13589{
13590 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13591 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13592}
13593
13594
13595/* Opcode 0xf2 0x0f 0xfe - invalid */
13596
13597
13598/** Opcode **** 0x0f 0xff - UD0 */
13599FNIEMOP_DEF(iemOp_ud0)
13600{
13601 IEMOP_MNEMONIC(ud0, "ud0");
13602 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13603 {
13604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13605#ifndef TST_IEM_CHECK_MC
13606 if (IEM_IS_MODRM_MEM_MODE(bRm))
13607 {
13608 RTGCPTR GCPtrEff;
13609 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13610 if (rcStrict != VINF_SUCCESS)
13611 return rcStrict;
13612 }
13613#endif
13614 IEMOP_HLP_DONE_DECODING();
13615 }
13616 return IEMOP_RAISE_INVALID_OPCODE();
13617}
13618
13619
13620
13621/**
13622 * Two byte opcode map, first byte 0x0f.
13623 *
13624 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13625 * check if it needs updating as well when making changes.
13626 */
13627IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13628{
13629 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
13630 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13631 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13632 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13633 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13634 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13635 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13636 /* 0x06 */ IEMOP_X4(iemOp_clts),
13637 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13638 /* 0x08 */ IEMOP_X4(iemOp_invd),
13639 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13640 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13641 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13642 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13643 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13644 /* 0x0e */ IEMOP_X4(iemOp_femms),
13645 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13646
13647 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13648 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13649 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13650 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13651 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13652 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13653 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13654 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13655 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13656 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13657 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13658 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13659 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13660 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13661 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13662 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13663
13664 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13665 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13666 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13667 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13668 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13669 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13670 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13671 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13672 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13673 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13674 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13675 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13676 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13677 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13678 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13679 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13680
13681 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13682 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13683 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13684 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13685 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13686 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13687 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13688 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13689 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13690 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13691 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13692 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13693 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13694 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13695 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13696 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13697
13698 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13699 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13700 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13701 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13702 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13703 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13704 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13705 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13706 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13707 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13708 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13709 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13710 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13711 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13712 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13713 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13714
13715 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13716 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13717 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13718 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13719 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13720 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13721 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13722 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13723 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13724 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13725 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13726 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13727 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13728 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13729 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13730 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13731
13732 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13733 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13734 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13735 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13736 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13737 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13738 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13739 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13740 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13741 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13742 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13743 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13744 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13745 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13746 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13747 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13748
13749 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13750 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13751 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13752 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13753 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13754 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13755 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13756 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13757
13758 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13759 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13760 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13761 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13762 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13763 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13764 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13765 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13766
13767 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13768 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13769 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13770 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13771 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13772 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13773 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13774 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13775 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13776 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13777 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13778 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13779 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
13780 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
13781 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
13782 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
13783
13784 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
13785 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
13786 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
13787 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
13788 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
13789 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
13790 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
13791 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
13792 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
13793 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
13794 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
13795 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
13796 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
13797 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
13798 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
13799 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
13800
13801 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
13802 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
13803 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
13804 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
13805 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
13806 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
13807 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
13808 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
13809 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
13810 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
13811 /* 0xaa */ IEMOP_X4(iemOp_rsm),
13812 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
13813 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
13814 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
13815 /* 0xae */ IEMOP_X4(iemOp_Grp15),
13816 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
13817
13818 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
13819 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
13820 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
13821 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
13822 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
13823 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
13824 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
13825 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
13826 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
13827 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
13828 /* 0xba */ IEMOP_X4(iemOp_Grp8),
13829 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
13830 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
13831 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
13832 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
13833 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
13834
13835 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
13836 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
13837 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
13838 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13839 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13840 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13841 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
13842 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
13843 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
13844 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
13845 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
13846 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
13847 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
13848 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
13849 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
13850 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
13851
13852 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
13853 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13854 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13855 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13856 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13857 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13858 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
13859 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13860 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13861 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13862 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13863 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13864 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13865 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13866 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13867 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13868
13869 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13870 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13871 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13872 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13873 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13874 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13875 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
13876 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13877 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13878 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13879 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13880 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13881 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13882 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13883 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13884 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13885
13886 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
13887 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13888 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13889 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13890 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13891 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13892 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13893 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13894 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13895 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13896 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13897 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13898 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13899 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13900 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13901 /* 0xff */ IEMOP_X4(iemOp_ud0),
13902};
13903AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
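
/* Illustrative sketch, assumed from the table layout: a dispatcher would
   pick an entry as bOpcode * 4 + idxPrefix, where idxPrefix is 0..3 for no
   prefix, 066h, 0f3h and 0f2h respectively; 256 opcodes times 4 prefix
   columns gives the 1024 entries asserted above. */
static PFNIEMOP iemSketchTwoByteMapLookup(uint8_t bOpcode, uint8_t idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}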
13904
13905/** @} */
13906