VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@105261

Last change on this file since 105261 was 105173, checked in by vboxsync, 5 months ago

VMM/IEM: Implement native emitter for packuswb, bugref:10652

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 518.3 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 105173 2024-07-07 13:04:51Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
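
/*
 * Illustrative sketch, not part of the original file: a typical opcode handler
 * built on the worker above.  The handler and implementation names
 * (iemOp_pand_Pq_Qq, iemAImpl_pand_u64) are assumptions for the example; the
 * FNIEMOP_CALL_1 dispatch pattern is what the worker is designed for.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC(pand, "pand Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
}
#endif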


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * The @a pfnU64 worker function takes no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker function takes no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
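
/*
 * Illustrative sketch, not part of the original file: a 66h-prefixed SSE2
 * handler delegating to the worker above.  Handler and implementation names
 * (iemOp_pxor_Vx_Wx, iemAImpl_pxor_u128) are assumptions for the example.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor, "pxor Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pxor_u128);
}
#endif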


/**
 * A body preprocessor variant of iemOpCommonSse2Opt_FullFull_To_Full in order
 * to support native emitters for certain instructions.
 */
#define SSE2_OPT_BODY_FullFull_To_Full(a_Ins, a_pImplExpr, a_fRegNativeArchs, a_fMemNativeArchs) \
    PFNIEMAIMPLMEDIAOPTF2U128 const pfnU128 = (a_pImplExpr); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * XMM, XMM. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rr_u128), IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(PCRTUINT128U, pSrc, 1); \
            IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * XMM, [mem128]. \
         */ \
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
            IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_rv_u128), IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(PRTUINT128U, pDst, 0); \
            IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); \
            IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } void(0)

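/*
 * Illustrative sketch, not part of the original file: how an instruction with
 * a native emitter, e.g. the packuswb mentioned in the change log, might
 * expand the body macro above.  The implementation expression and the
 * architecture masks are assumptions for the example.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC(packuswb, "packuswb Vx,Wx");
    SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128,
                                   RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
#endif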

/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
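
/*
 * Illustrative sketch, not part of the original file: an unpack-low handler
 * using the worker above; only 32 bits are fetched in the memory case.  The
 * names (iemOp_punpcklbw_Pq_Qd, iemAImpl_punpcklbw_u64) are assumptions.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq,Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
#endif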


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit wide memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit wide memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
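
/*
 * Illustrative sketch, not part of the original file: a packed single-precision
 * handler built on the worker above; the result is committed only after the
 * MXCSR-based exception check.  The names (iemOp_addps_Vps_Wps,
 * iemAImpl_addps_u128) are assumptions for the example.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC(addps, "addps Vps,Wps");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}
#endif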


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
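
/*
 * Illustrative sketch, not part of the original file: a scalar double-precision
 * handler built on the worker above; only 64 bits are fetched in the memory
 * case.  The names (iemOp_addsd_Vsd_Wsd, iemAImpl_addsd_u128_r64) are
 * assumptions for the example.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC(addsd, "addsd Vsd,Wsd");
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
#endif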


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(X86XMMREG, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps      grp6
 * @opcode      /4
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps      grp6
 * @opcode      /5
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
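
/*
 * Worked example, not part of the original file: the table above is indexed by
 * the reg field of the ModR/M byte (bits 5:3).  For the byte sequence
 * 0f 00 d8, bRm = 0xd8 = 11 011 000b, so reg = (0xd8 >> 3) & 7 = 3 and the
 * dispatch lands in iemOp_Grp6_ltr.  (IEM_GET_MODRM_REG_8 may differ in
 * implementation details, but this is the extraction it performs.)
 */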


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
                            iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
}
1641
1642
1643/** Opcode 0x0f 0x01 0xf9. */
1644FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1645{
1646 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1648 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1651 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1652 iemCImpl_rdtscp);
1653}
1654
1655
1656/**
1657 * Group 7 jump table, memory variant.
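 *
 * Indexed by the ModRM.reg field; the register-form (mod=3) encodings are
 * dispatched by the switch in iemOp_Grp7 below instead.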
1658 */
1659IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1660{
1661 iemOp_Grp7_sgdt,
1662 iemOp_Grp7_sidt,
1663 iemOp_Grp7_lgdt,
1664 iemOp_Grp7_lidt,
1665 iemOp_Grp7_smsw,
1666 iemOp_InvalidWithRM,
1667 iemOp_Grp7_lmsw,
1668 iemOp_Grp7_invlpg
1669};
1670
1671
1672/** Opcode 0x0f 0x01. */
1673FNIEMOP_DEF(iemOp_Grp7)
1674{
1675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1676 if (IEM_IS_MODRM_MEM_MODE(bRm))
1677 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1678
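    /* Register form: the ModRM.rm field selects the sub-instruction within
       each reg group. */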
1679 switch (IEM_GET_MODRM_REG_8(bRm))
1680 {
1681 case 0:
1682 switch (IEM_GET_MODRM_RM_8(bRm))
1683 {
1684 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1685 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1686 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1687 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1688 }
1689 IEMOP_RAISE_INVALID_OPCODE_RET();
1690
1691 case 1:
1692 switch (IEM_GET_MODRM_RM_8(bRm))
1693 {
1694 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1695 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1696 }
1697 IEMOP_RAISE_INVALID_OPCODE_RET();
1698
1699 case 2:
1700 switch (IEM_GET_MODRM_RM_8(bRm))
1701 {
1702 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1703 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1704 }
1705 IEMOP_RAISE_INVALID_OPCODE_RET();
1706
1707 case 3:
1708 switch (IEM_GET_MODRM_RM_8(bRm))
1709 {
1710 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1711 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1712 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1713 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1714 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1715 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1716 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1717 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1719 }
1720
1721 case 4:
1722 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1723
1724 case 5:
1725 IEMOP_RAISE_INVALID_OPCODE_RET();
1726
1727 case 6:
1728 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1729
1730 case 7:
1731 switch (IEM_GET_MODRM_RM_8(bRm))
1732 {
1733 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1734 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1735 }
1736 IEMOP_RAISE_INVALID_OPCODE_RET();
1737
1738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1739 }
1740}
1741
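/**
 * Common worker for LAR and LSL (0x0f 0x02 and 0x0f 0x03) on the form:
 *      lar/lsl Gv, Ew
 *
 * Only a 16-bit source is fetched even for 32-bit and 64-bit operand sizes,
 * and the 32-bit and 64-bit destination variants share the
 * iemCImpl_LarLsl_u64 worker. Example: 'lsl eax, cx' loads the segment limit
 * of the selector in CX into EAX and sets ZF on success.
 */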
1742FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1743{
1744 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1746
1747 if (IEM_IS_MODRM_REG_MODE(bRm))
1748 {
1749 switch (pVCpu->iem.s.enmEffOpSize)
1750 {
1751 case IEMMODE_16BIT:
1752 IEM_MC_BEGIN(0, 0);
1753 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1754 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1755 IEM_MC_ARG(uint16_t, u16Sel, 1);
1756 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1757
1758 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1759 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1760 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1761 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1762
1763 IEM_MC_END();
1764 break;
1765
1766 case IEMMODE_32BIT:
1767 case IEMMODE_64BIT:
1768 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1769 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1770 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1771 IEM_MC_ARG(uint16_t, u16Sel, 1);
1772 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1773
1774 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1775 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1776 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1777 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1778
1779 IEM_MC_END();
1780 break;
1781
1782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1783 }
1784 }
1785 else
1786 {
1787 switch (pVCpu->iem.s.enmEffOpSize)
1788 {
1789 case IEMMODE_16BIT:
1790 IEM_MC_BEGIN(0, 0);
1791 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1792 IEM_MC_ARG(uint16_t, u16Sel, 1);
1793 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1795
1796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1797 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1798
1799 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1800 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1801 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1802 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1803
1804 IEM_MC_END();
1805 break;
1806
1807 case IEMMODE_32BIT:
1808 case IEMMODE_64BIT:
1809 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
1810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1811 IEM_MC_ARG(uint16_t, u16Sel, 1);
1812 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1814
1815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1816 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1817/** @todo testcase: make sure it's a 16-bit read. */
1818
1819 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1820 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1821 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1822 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1823
1824 IEM_MC_END();
1825 break;
1826
1827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1828 }
1829 }
1830}
1831
1832
1833
1834/**
1835 * @opcode 0x02
1836 * @opflmodify zf
1837 */
1838FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1839{
1840 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1842}
1843
1844
1845/**
1846 * @opcode 0x03
1847 * @opflmodify zf
1848 */
1849FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1850{
1851 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1852 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1853}
1854
1855
1856/** Opcode 0x0f 0x05. */
1857FNIEMOP_DEF(iemOp_syscall)
1858{
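    /* On the 286 the 0x0f 0x05 encoding is LOADALL rather than SYSCALL. */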
1859 if (RT_LIKELY(pVCpu->iem.s.uTargetCpu != IEMTARGETCPU_286))
1860 {
1861 IEMOP_MNEMONIC(syscall, "syscall");
1862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1863 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1864 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0, iemCImpl_syscall);
1865 }
1866 else
1867 {
1868 IEMOP_MNEMONIC(loadall286, "loadall286");
1869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1870 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1871 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1872 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_loadall286);
1873 }
1874}
1875
1876
1877/** Opcode 0x0f 0x06. */
1878FNIEMOP_DEF(iemOp_clts)
1879{
1880 IEMOP_MNEMONIC(clts, "clts");
1881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1882 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1883}
1884
1885
1886/** Opcode 0x0f 0x07. */
1887FNIEMOP_DEF(iemOp_sysret)
1888{
1889 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1891 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1892 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1893 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1894}
1895
1896
1897/** Opcode 0x0f 0x08. */
1898FNIEMOP_DEF(iemOp_invd)
1899{
1900 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1901 IEMOP_HLP_MIN_486();
1902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1903 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
1904}
1905
1906
1907/** Opcode 0x0f 0x09. */
1908FNIEMOP_DEF(iemOp_wbinvd)
1909{
1910 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1911 IEMOP_HLP_MIN_486();
1912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1913 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
1914}
1915
1916
1917/** Opcode 0x0f 0x0b. */
1918FNIEMOP_DEF(iemOp_ud2)
1919{
1920 IEMOP_MNEMONIC(ud2, "ud2");
1921 IEMOP_RAISE_INVALID_OPCODE_RET();
1922}
1923
1924/** Opcode 0x0f 0x0d. */
1925FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1926{
1927 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1928 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1929 {
1930 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1931 IEMOP_RAISE_INVALID_OPCODE_RET();
1932 }
1933
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if (IEM_IS_MODRM_REG_MODE(bRm))
1936 {
1937 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1938 IEMOP_RAISE_INVALID_OPCODE_RET();
1939 }
1940
1941 switch (IEM_GET_MODRM_REG_8(bRm))
1942 {
1943 case 2: /* Aliased to /0 for the time being. */
1944 case 4: /* Aliased to /0 for the time being. */
1945 case 5: /* Aliased to /0 for the time being. */
1946 case 6: /* Aliased to /0 for the time being. */
1947 case 7: /* Aliased to /0 for the time being. */
1948 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1949 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1950 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1952 }
1953
1954 IEM_MC_BEGIN(0, 0);
1955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1958 /* Currently a NOP. */
1959 IEM_MC_NOREF(GCPtrEffSrc);
1960 IEM_MC_ADVANCE_RIP_AND_FINISH();
1961 IEM_MC_END();
1962}
1963
1964
1965/** Opcode 0x0f 0x0e. */
1966FNIEMOP_DEF(iemOp_femms)
1967{
1968 IEMOP_MNEMONIC(femms, "femms");
1969
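    /* AMD 3DNow!'s fast variant of EMMS: takes the FPU out of MMX mode. */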
1970 IEM_MC_BEGIN(0, 0);
1971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1972 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1973 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1974 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1975 IEM_MC_FPU_FROM_MMX_MODE();
1976 IEM_MC_ADVANCE_RIP_AND_FINISH();
1977 IEM_MC_END();
1978}
1979
1980
1981/** Opcode 0x0f 0x0f. */
1982FNIEMOP_DEF(iemOp_3Dnow)
1983{
1984 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1985 {
1986 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1987 IEMOP_RAISE_INVALID_OPCODE_RET();
1988 }
1989
1990#ifdef IEM_WITH_3DNOW
1991 /* This is pretty sparse, use switch instead of table. */
1992 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1993 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1994#else
1995 IEMOP_BITCH_ABOUT_STUB();
1996 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1997#endif
1998}
1999
2000
2001/**
2002 * @opcode 0x10
2003 * @oppfx none
2004 * @opcpuid sse
2005 * @opgroup og_sse_simdfp_datamove
2006 * @opxcpttype 4UA
2007 * @optest op1=1 op2=2 -> op1=2
2008 * @optest op1=0 op2=-22 -> op1=-22
2009 */
2010FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2011{
2012 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2014 if (IEM_IS_MODRM_REG_MODE(bRm))
2015 {
2016 /*
2017 * XMM128, XMM128.
2018 */
2019 IEM_MC_BEGIN(0, 0);
2020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2021 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2023 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2024 IEM_GET_MODRM_RM(pVCpu, bRm));
2025 IEM_MC_ADVANCE_RIP_AND_FINISH();
2026 IEM_MC_END();
2027 }
2028 else
2029 {
2030 /*
2031 * XMM128, [mem128].
2032 */
2033 IEM_MC_BEGIN(0, 0);
2034 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2036
2037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2039 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2041
2042 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2043 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2044
2045 IEM_MC_ADVANCE_RIP_AND_FINISH();
2046 IEM_MC_END();
2047 }
2048
2049}
2050
2051
2052/**
2053 * @opcode 0x10
2054 * @oppfx 0x66
2055 * @opcpuid sse2
2056 * @opgroup og_sse2_pcksclr_datamove
2057 * @opxcpttype 4UA
2058 * @optest op1=1 op2=2 -> op1=2
2059 * @optest op1=0 op2=-42 -> op1=-42
2060 */
2061FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2062{
2063 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2065 if (IEM_IS_MODRM_REG_MODE(bRm))
2066 {
2067 /*
2068 * XMM128, XMM128.
2069 */
2070 IEM_MC_BEGIN(0, 0);
2071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2072 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2073 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2074 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2075 IEM_GET_MODRM_RM(pVCpu, bRm));
2076 IEM_MC_ADVANCE_RIP_AND_FINISH();
2077 IEM_MC_END();
2078 }
2079 else
2080 {
2081 /*
2082 * XMM128, [mem128].
2083 */
2084 IEM_MC_BEGIN(0, 0);
2085 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2087
2088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2092
2093 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2094 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2095
2096 IEM_MC_ADVANCE_RIP_AND_FINISH();
2097 IEM_MC_END();
2098 }
2099}
2100
2101
2102/**
2103 * @opcode 0x10
2104 * @oppfx 0xf3
2105 * @opcpuid sse
2106 * @opgroup og_sse_simdfp_datamove
2107 * @opxcpttype 5
2108 * @optest op1=1 op2=2 -> op1=2
2109 * @optest op1=0 op2=-22 -> op1=-22
2110 */
2111FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2112{
2113 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2115 if (IEM_IS_MODRM_REG_MODE(bRm))
2116 {
2117 /*
2118 * XMM32, XMM32.
2119 */
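        /* Note: the register form merges, replacing only dword 0 of the
           destination, while the memory form below zero-extends into the
           full XMM register. */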
2120 IEM_MC_BEGIN(0, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2122 IEM_MC_LOCAL(uint32_t, uSrc);
2123
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2127 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2128
2129 IEM_MC_ADVANCE_RIP_AND_FINISH();
2130 IEM_MC_END();
2131 }
2132 else
2133 {
2134 /*
2135 * XMM128, [mem32].
2136 */
2137 IEM_MC_BEGIN(0, 0);
2138 IEM_MC_LOCAL(uint32_t, uSrc);
2139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2140
2141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2144 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2145
2146 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2147 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2148
2149 IEM_MC_ADVANCE_RIP_AND_FINISH();
2150 IEM_MC_END();
2151 }
2152}
2153
2154
2155/**
2156 * @opcode 0x10
2157 * @oppfx 0xf2
2158 * @opcpuid sse2
2159 * @opgroup og_sse2_pcksclr_datamove
2160 * @opxcpttype 5
2161 * @optest op1=1 op2=2 -> op1=2
2162 * @optest op1=0 op2=-42 -> op1=-42
2163 */
2164FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2165{
2166 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2168 if (IEM_IS_MODRM_REG_MODE(bRm))
2169 {
2170 /*
2171 * XMM64, XMM64.
2172 */
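        /* Same merge (register form) vs. zero-extend (memory form) split
           as movss above. */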
2173 IEM_MC_BEGIN(0, 0);
2174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2175 IEM_MC_LOCAL(uint64_t, uSrc);
2176
2177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2178 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2179 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2180 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2181
2182 IEM_MC_ADVANCE_RIP_AND_FINISH();
2183 IEM_MC_END();
2184 }
2185 else
2186 {
2187 /*
2188 * XMM128, [mem64].
2189 */
2190 IEM_MC_BEGIN(0, 0);
2191 IEM_MC_LOCAL(uint64_t, uSrc);
2192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2193
2194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2196 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2197 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2198
2199 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2200 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2201
2202 IEM_MC_ADVANCE_RIP_AND_FINISH();
2203 IEM_MC_END();
2204 }
2205}
2206
2207
2208/**
2209 * @opcode 0x11
2210 * @oppfx none
2211 * @opcpuid sse
2212 * @opgroup og_sse_simdfp_datamove
2213 * @opxcpttype 4UA
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
2216 */
2217FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 if (IEM_IS_MODRM_REG_MODE(bRm))
2222 {
2223 /*
2224 * XMM128, XMM128.
2225 */
2226 IEM_MC_BEGIN(0, 0);
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2230 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2231 IEM_GET_MODRM_REG(pVCpu, bRm));
2232 IEM_MC_ADVANCE_RIP_AND_FINISH();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 /*
2238 * [mem128], XMM128.
2239 */
2240 IEM_MC_BEGIN(0, 0);
2241 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2243
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2246 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2248
2249 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2250 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2251
2252 IEM_MC_ADVANCE_RIP_AND_FINISH();
2253 IEM_MC_END();
2254 }
2255}
2256
2257
2258/**
2259 * @opcode 0x11
2260 * @oppfx 0x66
2261 * @opcpuid sse2
2262 * @opgroup og_sse2_pcksclr_datamove
2263 * @opxcpttype 4UA
2264 * @optest op1=1 op2=2 -> op1=2
2265 * @optest op1=0 op2=-42 -> op1=-42
2266 */
2267FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2268{
2269 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2271 if (IEM_IS_MODRM_REG_MODE(bRm))
2272 {
2273 /*
2274 * XMM128, XMM128.
2275 */
2276 IEM_MC_BEGIN(0, 0);
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2278 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2279 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2280 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2281 IEM_GET_MODRM_REG(pVCpu, bRm));
2282 IEM_MC_ADVANCE_RIP_AND_FINISH();
2283 IEM_MC_END();
2284 }
2285 else
2286 {
2287 /*
2288 * [mem128], XMM128.
2289 */
2290 IEM_MC_BEGIN(0, 0);
2291 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2293
2294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2296 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2297 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2298
2299 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2300 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2301
2302 IEM_MC_ADVANCE_RIP_AND_FINISH();
2303 IEM_MC_END();
2304 }
2305}
2306
2307
2308/**
2309 * @opcode 0x11
2310 * @oppfx 0xf3
2311 * @opcpuid sse
2312 * @opgroup og_sse_simdfp_datamove
2313 * @opxcpttype 5
2314 * @optest op1=1 op2=2 -> op1=2
2315 * @optest op1=0 op2=-22 -> op1=-22
2316 */
2317FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2318{
2319 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2321 if (IEM_IS_MODRM_REG_MODE(bRm))
2322 {
2323 /*
2324 * XMM32, XMM32.
2325 */
2326 IEM_MC_BEGIN(0, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2328 IEM_MC_LOCAL(uint32_t, uSrc);
2329
2330 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2332 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2333 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2334
2335 IEM_MC_ADVANCE_RIP_AND_FINISH();
2336 IEM_MC_END();
2337 }
2338 else
2339 {
2340 /*
2341 * [mem32], XMM32.
2342 */
2343 IEM_MC_BEGIN(0, 0);
2344 IEM_MC_LOCAL(uint32_t, uSrc);
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2346
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2351
2352 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2353 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2354
2355 IEM_MC_ADVANCE_RIP_AND_FINISH();
2356 IEM_MC_END();
2357 }
2358}
2359
2360
2361/**
2362 * @opcode 0x11
2363 * @oppfx 0xf2
2364 * @opcpuid sse2
2365 * @opgroup og_sse2_pcksclr_datamove
2366 * @opxcpttype 5
2367 * @optest op1=1 op2=2 -> op1=2
2368 * @optest op1=0 op2=-42 -> op1=-42
2369 */
2370FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2371{
2372 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2374 if (IEM_IS_MODRM_REG_MODE(bRm))
2375 {
2376 /*
2377 * XMM64, XMM64.
2378 */
2379 IEM_MC_BEGIN(0, 0);
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2381 IEM_MC_LOCAL(uint64_t, uSrc);
2382
2383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2385 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2386 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2387
2388 IEM_MC_ADVANCE_RIP_AND_FINISH();
2389 IEM_MC_END();
2390 }
2391 else
2392 {
2393 /*
2394 * [mem64], XMM64.
2395 */
2396 IEM_MC_BEGIN(0, 0);
2397 IEM_MC_LOCAL(uint64_t, uSrc);
2398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2399
2400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2404
2405 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2406 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2407
2408 IEM_MC_ADVANCE_RIP_AND_FINISH();
2409 IEM_MC_END();
2410 }
2411}
2412
2413
2414FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2415{
2416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2417 if (IEM_IS_MODRM_REG_MODE(bRm))
2418 {
2419 /**
2420 * @opcode 0x12
2421 * @opcodesub 11 mr/reg
2422 * @oppfx none
2423 * @opcpuid sse
2424 * @opgroup og_sse_simdfp_datamove
2425 * @opxcpttype 5
2426 * @optest op1=1 op2=2 -> op1=2
2427 * @optest op1=0 op2=-42 -> op1=-42
2428 */
2429 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2430
2431 IEM_MC_BEGIN(0, 0);
2432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2433 IEM_MC_LOCAL(uint64_t, uSrc);
2434
2435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2437 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2438 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443 else
2444 {
2445 /**
2446 * @opdone
2447 * @opcode 0x12
2448 * @opcodesub !11 mr/reg
2449 * @oppfx none
2450 * @opcpuid sse
2451 * @opgroup og_sse_simdfp_datamove
2452 * @opxcpttype 5
2453 * @optest op1=1 op2=2 -> op1=2
2454 * @optest op1=0 op2=-42 -> op1=-42
 2455 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2456 */
2457 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2458
2459 IEM_MC_BEGIN(0, 0);
2460 IEM_MC_LOCAL(uint64_t, uSrc);
2461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2462
2463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2465 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2467
2468 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2469 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2470
2471 IEM_MC_ADVANCE_RIP_AND_FINISH();
2472 IEM_MC_END();
2473 }
2474}
2475
2476
2477/**
2478 * @opcode 0x12
2479 * @opcodesub !11 mr/reg
2480 * @oppfx 0x66
2481 * @opcpuid sse2
2482 * @opgroup og_sse2_pcksclr_datamove
2483 * @opxcpttype 5
2484 * @optest op1=1 op2=2 -> op1=2
2485 * @optest op1=0 op2=-42 -> op1=-42
2486 */
2487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2488{
2489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2490 if (IEM_IS_MODRM_MEM_MODE(bRm))
2491 {
2492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2493
2494 IEM_MC_BEGIN(0, 0);
2495 IEM_MC_LOCAL(uint64_t, uSrc);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2502
2503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2504 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509
2510 /**
2511 * @opdone
2512 * @opmnemonic ud660f12m3
2513 * @opcode 0x12
2514 * @opcodesub 11 mr/reg
2515 * @oppfx 0x66
2516 * @opunused immediate
2517 * @opcpuid sse
2518 * @optest ->
2519 */
2520 else
2521 IEMOP_RAISE_INVALID_OPCODE_RET();
2522}
2523
2524
2525/**
2526 * @opcode 0x12
2527 * @oppfx 0xf3
2528 * @opcpuid sse3
2529 * @opgroup og_sse3_pcksclr_datamove
2530 * @opxcpttype 4
2531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2532 * op1=0x00000002000000020000000100000001
2533 */
2534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2535{
2536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2538 if (IEM_IS_MODRM_REG_MODE(bRm))
2539 {
2540 /*
2541 * XMM, XMM.
2542 */
2543 IEM_MC_BEGIN(0, 0);
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2545 IEM_MC_LOCAL(RTUINT128U, uSrc);
2546
2547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2548 IEM_MC_PREPARE_SSE_USAGE();
2549
2550 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
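        /* Result = { src[0], src[0], src[2], src[2] }: the even dwords duplicated. */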
2551 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2552 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2553 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2554 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2555
2556 IEM_MC_ADVANCE_RIP_AND_FINISH();
2557 IEM_MC_END();
2558 }
2559 else
2560 {
2561 /*
2562 * XMM, [mem128].
2563 */
2564 IEM_MC_BEGIN(0, 0);
2565 IEM_MC_LOCAL(RTUINT128U, uSrc);
2566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2567
2568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2570 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2571 IEM_MC_PREPARE_SSE_USAGE();
2572
2573 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2574 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2575 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2576 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2577 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2578
2579 IEM_MC_ADVANCE_RIP_AND_FINISH();
2580 IEM_MC_END();
2581 }
2582}
2583
2584
2585/**
2586 * @opcode 0x12
2587 * @oppfx 0xf2
2588 * @opcpuid sse3
2589 * @opgroup og_sse3_pcksclr_datamove
2590 * @opxcpttype 5
2591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2592 * op1=0x22222222111111112222222211111111
2593 */
2594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2595{
2596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2598 if (IEM_IS_MODRM_REG_MODE(bRm))
2599 {
2600 /*
2601 * XMM128, XMM64.
2602 */
2603 IEM_MC_BEGIN(0, 0);
2604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2605 IEM_MC_LOCAL(uint64_t, uSrc);
2606
2607 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2608 IEM_MC_PREPARE_SSE_USAGE();
2609
2610 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
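        /* The low qword of the source is duplicated into both halves of the
           destination. */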
2611 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2612 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2613
2614 IEM_MC_ADVANCE_RIP_AND_FINISH();
2615 IEM_MC_END();
2616 }
2617 else
2618 {
2619 /*
2620 * XMM128, [mem64].
2621 */
2622 IEM_MC_BEGIN(0, 0);
2623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2624 IEM_MC_LOCAL(uint64_t, uSrc);
2625
2626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2628 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2629 IEM_MC_PREPARE_SSE_USAGE();
2630
2631 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2632 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2633 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2634
2635 IEM_MC_ADVANCE_RIP_AND_FINISH();
2636 IEM_MC_END();
2637 }
2638}
2639
2640
2641/**
2642 * @opcode 0x13
2643 * @opcodesub !11 mr/reg
2644 * @oppfx none
2645 * @opcpuid sse
2646 * @opgroup og_sse_simdfp_datamove
2647 * @opxcpttype 5
2648 * @optest op1=1 op2=2 -> op1=2
2649 * @optest op1=0 op2=-42 -> op1=-42
2650 */
2651FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2652{
2653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2654 if (IEM_IS_MODRM_MEM_MODE(bRm))
2655 {
2656 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2657
2658 IEM_MC_BEGIN(0, 0);
2659 IEM_MC_LOCAL(uint64_t, uSrc);
2660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2661
2662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2665 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2666
2667 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2668 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2669
2670 IEM_MC_ADVANCE_RIP_AND_FINISH();
2671 IEM_MC_END();
2672 }
2673
2674 /**
2675 * @opdone
2676 * @opmnemonic ud0f13m3
2677 * @opcode 0x13
2678 * @opcodesub 11 mr/reg
2679 * @oppfx none
2680 * @opunused immediate
2681 * @opcpuid sse
2682 * @optest ->
2683 */
2684 else
2685 IEMOP_RAISE_INVALID_OPCODE_RET();
2686}
2687
2688
2689/**
2690 * @opcode 0x13
2691 * @opcodesub !11 mr/reg
2692 * @oppfx 0x66
2693 * @opcpuid sse2
2694 * @opgroup og_sse2_pcksclr_datamove
2695 * @opxcpttype 5
2696 * @optest op1=1 op2=2 -> op1=2
2697 * @optest op1=0 op2=-42 -> op1=-42
2698 */
2699FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2700{
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2702 if (IEM_IS_MODRM_MEM_MODE(bRm))
2703 {
2704 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2705
2706 IEM_MC_BEGIN(0, 0);
2707 IEM_MC_LOCAL(uint64_t, uSrc);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709
2710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2713 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2714
2715 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2716 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2717
2718 IEM_MC_ADVANCE_RIP_AND_FINISH();
2719 IEM_MC_END();
2720 }
2721
2722 /**
2723 * @opdone
2724 * @opmnemonic ud660f13m3
2725 * @opcode 0x13
2726 * @opcodesub 11 mr/reg
2727 * @oppfx 0x66
2728 * @opunused immediate
2729 * @opcpuid sse
2730 * @optest ->
2731 */
2732 else
2733 IEMOP_RAISE_INVALID_OPCODE_RET();
2734}
2735
2736
2737/**
2738 * @opmnemonic udf30f13
2739 * @opcode 0x13
2740 * @oppfx 0xf3
2741 * @opunused intel-modrm
2742 * @opcpuid sse
2743 * @optest ->
2744 * @opdone
2745 */
2746
2747/**
2748 * @opmnemonic udf20f13
2749 * @opcode 0x13
2750 * @oppfx 0xf2
2751 * @opunused intel-modrm
2752 * @opcpuid sse
2753 * @optest ->
2754 * @opdone
2755 */
2756
2757/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2758FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2759{
2760 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2761 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2762}
2763
2764
2765/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2766FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2767{
2768 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2769 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2770}
2771
2772
2773/**
2774 * @opdone
2775 * @opmnemonic udf30f14
2776 * @opcode 0x14
2777 * @oppfx 0xf3
2778 * @opunused intel-modrm
2779 * @opcpuid sse
2780 * @optest ->
2781 * @opdone
2782 */
2783
2784/**
2785 * @opmnemonic udf20f14
2786 * @opcode 0x14
2787 * @oppfx 0xf2
2788 * @opunused intel-modrm
2789 * @opcpuid sse
2790 * @optest ->
2791 * @opdone
2792 */
2793
2794/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2795FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2796{
2797 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2798 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2799}
2800
2801
2802/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2803FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2804{
2805 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2806 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2807}
2808
2809
2810/* Opcode 0xf3 0x0f 0x15 - invalid */
2811/* Opcode 0xf2 0x0f 0x15 - invalid */
2812
2813/**
2814 * @opdone
2815 * @opmnemonic udf30f15
2816 * @opcode 0x15
2817 * @oppfx 0xf3
2818 * @opunused intel-modrm
2819 * @opcpuid sse
2820 * @optest ->
2821 * @opdone
2822 */
2823
2824/**
2825 * @opmnemonic udf20f15
2826 * @opcode 0x15
2827 * @oppfx 0xf2
2828 * @opunused intel-modrm
2829 * @opcpuid sse
2830 * @optest ->
2831 * @opdone
2832 */
2833
2834FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2835{
2836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2837 if (IEM_IS_MODRM_REG_MODE(bRm))
2838 {
2839 /**
2840 * @opcode 0x16
2841 * @opcodesub 11 mr/reg
2842 * @oppfx none
2843 * @opcpuid sse
2844 * @opgroup og_sse_simdfp_datamove
2845 * @opxcpttype 5
2846 * @optest op1=1 op2=2 -> op1=2
2847 * @optest op1=0 op2=-42 -> op1=-42
2848 */
2849 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2850
2851 IEM_MC_BEGIN(0, 0);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2853 IEM_MC_LOCAL(uint64_t, uSrc);
2854
2855 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2857 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2858 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2859
2860 IEM_MC_ADVANCE_RIP_AND_FINISH();
2861 IEM_MC_END();
2862 }
2863 else
2864 {
2865 /**
2866 * @opdone
2867 * @opcode 0x16
2868 * @opcodesub !11 mr/reg
2869 * @oppfx none
2870 * @opcpuid sse
2871 * @opgroup og_sse_simdfp_datamove
2872 * @opxcpttype 5
2873 * @optest op1=1 op2=2 -> op1=2
2874 * @optest op1=0 op2=-42 -> op1=-42
2875 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2876 */
2877 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2878
2879 IEM_MC_BEGIN(0, 0);
2880 IEM_MC_LOCAL(uint64_t, uSrc);
2881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2882
2883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2885 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2886 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2887
2888 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2889 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2890
2891 IEM_MC_ADVANCE_RIP_AND_FINISH();
2892 IEM_MC_END();
2893 }
2894}
2895
2896
2897/**
2898 * @opcode 0x16
2899 * @opcodesub !11 mr/reg
2900 * @oppfx 0x66
2901 * @opcpuid sse2
2902 * @opgroup og_sse2_pcksclr_datamove
2903 * @opxcpttype 5
2904 * @optest op1=1 op2=2 -> op1=2
2905 * @optest op1=0 op2=-42 -> op1=-42
2906 */
2907FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2908{
2909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2910 if (IEM_IS_MODRM_MEM_MODE(bRm))
2911 {
2912 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2913
2914 IEM_MC_BEGIN(0, 0);
2915 IEM_MC_LOCAL(uint64_t, uSrc);
2916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2917
2918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2920 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2921 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2922
2923 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2924 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2925
2926 IEM_MC_ADVANCE_RIP_AND_FINISH();
2927 IEM_MC_END();
2928 }
2929
2930 /**
2931 * @opdone
2932 * @opmnemonic ud660f16m3
2933 * @opcode 0x16
2934 * @opcodesub 11 mr/reg
2935 * @oppfx 0x66
2936 * @opunused immediate
2937 * @opcpuid sse
2938 * @optest ->
2939 */
2940 else
2941 IEMOP_RAISE_INVALID_OPCODE_RET();
2942}
2943
2944
2945/**
2946 * @opcode 0x16
2947 * @oppfx 0xf3
2948 * @opcpuid sse3
2949 * @opgroup og_sse3_pcksclr_datamove
2950 * @opxcpttype 4
2951 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2952 * op1=0x00000002000000020000000100000001
2953 */
2954FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2955{
2956 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2958 if (IEM_IS_MODRM_REG_MODE(bRm))
2959 {
2960 /*
2961 * XMM128, XMM128.
2962 */
2963 IEM_MC_BEGIN(0, 0);
2964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2965 IEM_MC_LOCAL(RTUINT128U, uSrc);
2966
2967 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2968 IEM_MC_PREPARE_SSE_USAGE();
2969
2970 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
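        /* Result = { src[1], src[1], src[3], src[3] }: the odd dwords duplicated. */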
2971 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2972 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2973 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2974 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2975
2976 IEM_MC_ADVANCE_RIP_AND_FINISH();
2977 IEM_MC_END();
2978 }
2979 else
2980 {
2981 /*
2982 * XMM128, [mem128].
2983 */
2984 IEM_MC_BEGIN(0, 0);
2985 IEM_MC_LOCAL(RTUINT128U, uSrc);
2986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2987
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2990 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2991 IEM_MC_PREPARE_SSE_USAGE();
2992
2993 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2994 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
2995 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
2996 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
2997 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
2998
2999 IEM_MC_ADVANCE_RIP_AND_FINISH();
3000 IEM_MC_END();
3001 }
3002}
3003
3004/**
3005 * @opdone
3006 * @opmnemonic udf20f16
3007 * @opcode 0x16
3008 * @oppfx 0xf2
3009 * @opunused intel-modrm
3010 * @opcpuid sse
3011 * @optest ->
3012 * @opdone
3013 */
3014
3015
3016/**
3017 * @opcode 0x17
3018 * @opcodesub !11 mr/reg
3019 * @oppfx none
3020 * @opcpuid sse
3021 * @opgroup og_sse_simdfp_datamove
3022 * @opxcpttype 5
3023 * @optest op1=1 op2=2 -> op1=2
3024 * @optest op1=0 op2=-42 -> op1=-42
3025 */
3026FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3027{
3028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3029 if (IEM_IS_MODRM_MEM_MODE(bRm))
3030 {
3031 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3032
3033 IEM_MC_BEGIN(0, 0);
3034 IEM_MC_LOCAL(uint64_t, uSrc);
3035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3036
3037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3039 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3041
3042 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3043 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3044
3045 IEM_MC_ADVANCE_RIP_AND_FINISH();
3046 IEM_MC_END();
3047 }
3048
3049 /**
3050 * @opdone
3051 * @opmnemonic ud0f17m3
3052 * @opcode 0x17
3053 * @opcodesub 11 mr/reg
3054 * @oppfx none
3055 * @opunused immediate
3056 * @opcpuid sse
3057 * @optest ->
3058 */
3059 else
3060 IEMOP_RAISE_INVALID_OPCODE_RET();
3061}
3062
3063
3064/**
3065 * @opcode 0x17
3066 * @opcodesub !11 mr/reg
3067 * @oppfx 0x66
3068 * @opcpuid sse2
3069 * @opgroup og_sse2_pcksclr_datamove
3070 * @opxcpttype 5
3071 * @optest op1=1 op2=2 -> op1=2
3072 * @optest op1=0 op2=-42 -> op1=-42
3073 */
3074FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3075{
3076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3077 if (IEM_IS_MODRM_MEM_MODE(bRm))
3078 {
3079 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3080
3081 IEM_MC_BEGIN(0, 0);
3082 IEM_MC_LOCAL(uint64_t, uSrc);
3083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3084
3085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3087 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3089
3090 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3091 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3092
3093 IEM_MC_ADVANCE_RIP_AND_FINISH();
3094 IEM_MC_END();
3095 }
3096
3097 /**
3098 * @opdone
3099 * @opmnemonic ud660f17m3
3100 * @opcode 0x17
3101 * @opcodesub 11 mr/reg
3102 * @oppfx 0x66
3103 * @opunused immediate
3104 * @opcpuid sse
3105 * @optest ->
3106 */
3107 else
3108 IEMOP_RAISE_INVALID_OPCODE_RET();
3109}
3110
3111
3112/**
3113 * @opdone
3114 * @opmnemonic udf30f17
3115 * @opcode 0x17
3116 * @oppfx 0xf3
3117 * @opunused intel-modrm
3118 * @opcpuid sse
3119 * @optest ->
3120 * @opdone
3121 */
3122
3123/**
3124 * @opmnemonic udf20f17
3125 * @opcode 0x17
3126 * @oppfx 0xf2
3127 * @opunused intel-modrm
3128 * @opcpuid sse
3129 * @optest ->
3130 * @opdone
3131 */
3132
3133
3134/** Opcode 0x0f 0x18. */
3135FNIEMOP_DEF(iemOp_prefetch_Grp16)
3136{
3137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3138 if (IEM_IS_MODRM_MEM_MODE(bRm))
3139 {
3140 switch (IEM_GET_MODRM_REG_8(bRm))
3141 {
3142 case 4: /* Aliased to /0 for the time being according to AMD. */
3143 case 5: /* Aliased to /0 for the time being according to AMD. */
3144 case 6: /* Aliased to /0 for the time being according to AMD. */
3145 case 7: /* Aliased to /0 for the time being according to AMD. */
3146 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3147 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3148 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3149 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3151 }
3152
3153 IEM_MC_BEGIN(0, 0);
3154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 /* Currently a NOP. */
3158 IEM_MC_NOREF(GCPtrEffSrc);
3159 IEM_MC_ADVANCE_RIP_AND_FINISH();
3160 IEM_MC_END();
3161 }
3162 else
3163 IEMOP_RAISE_INVALID_OPCODE_RET();
3164}
3165
3166
3167/** Opcode 0x0f 0x19..0x1f. */
3168FNIEMOP_DEF(iemOp_nop_Ev)
3169{
3170 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3172 if (IEM_IS_MODRM_REG_MODE(bRm))
3173 {
3174 IEM_MC_BEGIN(0, 0);
3175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3176 IEM_MC_ADVANCE_RIP_AND_FINISH();
3177 IEM_MC_END();
3178 }
3179 else
3180 {
3181 IEM_MC_BEGIN(0, 0);
3182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185 /* Currently a NOP. */
3186 IEM_MC_NOREF(GCPtrEffSrc);
3187 IEM_MC_ADVANCE_RIP_AND_FINISH();
3188 IEM_MC_END();
3189 }
3190}
3191
3192
3193/** Opcode 0x0f 0x20. */
3194FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3195{
3196 /* mod is ignored, as are operand-size overrides. */
3197/** @todo testcase: check memory encoding. */
3198 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3199 IEMOP_HLP_MIN_386();
3200 if (IEM_IS_64BIT_CODE(pVCpu))
3201 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3202 else
3203 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3204
3205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3206 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3208 {
3209 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3210 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3211 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3212 iCrReg |= 8;
3213 }
3214 switch (iCrReg)
3215 {
3216 case 0: case 2: case 3: case 4: case 8:
3217 break;
3218 default:
3219 IEMOP_RAISE_INVALID_OPCODE_RET();
3220 }
3221 IEMOP_HLP_DONE_DECODING();
3222
3223 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3224 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3225 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3226}
3227
3228
3229/** Opcode 0x0f 0x21. */
3230FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3231{
3232/** @todo testcase: check memory encoding. */
3233 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3234 IEMOP_HLP_MIN_386();
3235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3238 IEMOP_RAISE_INVALID_OPCODE_RET();
3239 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3240 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3241 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3242}
3243
3244
3245/** Opcode 0x0f 0x22. */
3246FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3247{
3248 /* mod is ignored, as are operand-size overrides. */
3249 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3250 IEMOP_HLP_MIN_386();
3251 if (IEM_IS_64BIT_CODE(pVCpu))
3252 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3253 else
3254 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3255
3256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3257 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3258 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3259 {
3260 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3261 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3262 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3263 iCrReg |= 8;
3264 }
3265 switch (iCrReg)
3266 {
3267 case 0: case 2: case 3: case 4: case 8:
3268 break;
3269 default:
3270 IEMOP_RAISE_INVALID_OPCODE_RET();
3271 }
3272 IEMOP_HLP_DONE_DECODING();
3273
3274 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3275 if (iCrReg & (2 | 8))
3276 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3277 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3278 else
3279 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3280 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3281}
3282
3283
3284/** Opcode 0x0f 0x23. */
3285FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3286{
3287 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3288 IEMOP_HLP_MIN_386();
3289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3292 IEMOP_RAISE_INVALID_OPCODE_RET();
3293 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3294 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3295}
3296
3297
3298/** Opcode 0x0f 0x24. */
3299FNIEMOP_DEF(iemOp_mov_Rd_Td)
3300{
3301 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3302 IEMOP_HLP_MIN_386();
3303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3305 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3306 IEMOP_RAISE_INVALID_OPCODE_RET();
3307 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3308 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3309}
3310
3311
3312/** Opcode 0x0f 0x26. */
3313FNIEMOP_DEF(iemOp_mov_Td_Rd)
3314{
3315 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3316 IEMOP_HLP_MIN_386();
3317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3319 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3320 IEMOP_RAISE_INVALID_OPCODE_RET();
3321 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3322}
3323
3324
3325/**
3326 * @opcode 0x28
3327 * @oppfx none
3328 * @opcpuid sse
3329 * @opgroup og_sse_simdfp_datamove
3330 * @opxcpttype 1
3331 * @optest op1=1 op2=2 -> op1=2
3332 * @optest op1=0 op2=-42 -> op1=-42
3333 */
3334FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3335{
3336 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3338 if (IEM_IS_MODRM_REG_MODE(bRm))
3339 {
3340 /*
3341 * Register, register.
3342 */
3343 IEM_MC_BEGIN(0, 0);
3344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3347 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3348 IEM_GET_MODRM_RM(pVCpu, bRm));
3349 IEM_MC_ADVANCE_RIP_AND_FINISH();
3350 IEM_MC_END();
3351 }
3352 else
3353 {
3354 /*
3355 * Register, memory.
3356 */
3357 IEM_MC_BEGIN(0, 0);
3358 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3360
3361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3365
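        /* Aligned fetch: movaps raises #GP on a misaligned memory operand,
           unlike movups above which uses the no-alignment-check variant. */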
3366 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3367 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3368
3369 IEM_MC_ADVANCE_RIP_AND_FINISH();
3370 IEM_MC_END();
3371 }
3372}
3373
3374/**
3375 * @opcode 0x28
3376 * @oppfx 66
3377 * @opcpuid sse2
3378 * @opgroup og_sse2_pcksclr_datamove
3379 * @opxcpttype 1
3380 * @optest op1=1 op2=2 -> op1=2
3381 * @optest op1=0 op2=-42 -> op1=-42
3382 */
3383FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3384{
3385 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3387 if (IEM_IS_MODRM_REG_MODE(bRm))
3388 {
3389 /*
3390 * Register, register.
3391 */
3392 IEM_MC_BEGIN(0, 0);
3393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3396 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3397 IEM_GET_MODRM_RM(pVCpu, bRm));
3398 IEM_MC_ADVANCE_RIP_AND_FINISH();
3399 IEM_MC_END();
3400 }
3401 else
3402 {
3403 /*
3404 * Register, memory.
3405 */
3406 IEM_MC_BEGIN(0, 0);
3407 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409
3410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3412 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3413 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3414
3415 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3416 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3417
3418 IEM_MC_ADVANCE_RIP_AND_FINISH();
3419 IEM_MC_END();
3420 }
3421}
3422
3423/* Opcode 0xf3 0x0f 0x28 - invalid */
3424/* Opcode 0xf2 0x0f 0x28 - invalid */
3425
3426/**
3427 * @opcode 0x29
3428 * @oppfx none
3429 * @opcpuid sse
3430 * @opgroup og_sse_simdfp_datamove
3431 * @opxcpttype 1
3432 * @optest op1=1 op2=2 -> op1=2
3433 * @optest op1=0 op2=-42 -> op1=-42
3434 */
3435FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3436{
3437 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if (IEM_IS_MODRM_REG_MODE(bRm))
3440 {
3441 /*
3442 * Register, register.
3443 */
3444 IEM_MC_BEGIN(0, 0);
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3449 IEM_GET_MODRM_REG(pVCpu, bRm));
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 /*
3456 * Memory, register.
3457 */
3458 IEM_MC_BEGIN(0, 0);
3459 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3466
3467 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3468 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3469
3470 IEM_MC_ADVANCE_RIP_AND_FINISH();
3471 IEM_MC_END();
3472 }
3473}
3474
3475/**
3476 * @opcode 0x29
3477 * @oppfx 66
3478 * @opcpuid sse2
3479 * @opgroup og_sse2_pcksclr_datamove
3480 * @opxcpttype 1
3481 * @optest op1=1 op2=2 -> op1=2
3482 * @optest op1=0 op2=-42 -> op1=-42
3483 */
3484FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3485{
3486 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3488 if (IEM_IS_MODRM_REG_MODE(bRm))
3489 {
3490 /*
3491 * Register, register.
3492 */
3493 IEM_MC_BEGIN(0, 0);
3494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3497 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3498 IEM_GET_MODRM_REG(pVCpu, bRm));
3499 IEM_MC_ADVANCE_RIP_AND_FINISH();
3500 IEM_MC_END();
3501 }
3502 else
3503 {
3504 /*
3505 * Memory, register.
3506 */
3507 IEM_MC_BEGIN(0, 0);
3508 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3510
3511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3515
3516 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3517 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3518
3519 IEM_MC_ADVANCE_RIP_AND_FINISH();
3520 IEM_MC_END();
3521 }
3522}
3523
3524/* Opcode 0xf3 0x0f 0x29 - invalid */
3525/* Opcode 0xf2 0x0f 0x29 - invalid */
3526
3527
3528/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3529FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3530{
3531 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3533 if (IEM_IS_MODRM_REG_MODE(bRm))
3534 {
3535 /*
3536 * XMM, MMX
3537 */
3538 IEM_MC_BEGIN(0, 0);
3539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3540 IEM_MC_LOCAL(X86XMMREG, Dst);
3541 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3542 IEM_MC_ARG(uint64_t, u64Src, 1);
3543 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3544 IEM_MC_MAYBE_RAISE_FPU_XCPT();
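        /* Reading an MMX source register puts the FPU into MMX mode even though
           the destination is an XMM register (contrast with the memory form of
           cvtpi2pd below, which does not). */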
3545 IEM_MC_PREPARE_FPU_USAGE();
3546 IEM_MC_FPU_TO_MMX_MODE();
3547
3548 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3549 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3550
3551 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3552 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3553 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3554
3555 IEM_MC_ADVANCE_RIP_AND_FINISH();
3556 IEM_MC_END();
3557 }
3558 else
3559 {
3560 /*
3561 * XMM, [mem64]
3562 */
3563 IEM_MC_BEGIN(0, 0);
3564 IEM_MC_LOCAL(X86XMMREG, Dst);
3565 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3566 IEM_MC_ARG(uint64_t, u64Src, 1);
3567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3568
3569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3571 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3572 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3573 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3574
3575 IEM_MC_PREPARE_FPU_USAGE();
3576 IEM_MC_FPU_TO_MMX_MODE();
3577
3578 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2ps_u128, pDst, u64Src);
3579 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3580 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3581
3582 IEM_MC_ADVANCE_RIP_AND_FINISH();
3583 IEM_MC_END();
3584 }
3585}
3586
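/*
 * Illustrative example (values made up): cvtpi2ps xmm0, mm1 with
 * mm1=0x00000002'ffffffff converts the two packed int32s into
 * xmm0[31:0]=-1.0f and xmm0[63:32]=2.0f.  Bits 127:64 are preserved,
 * which is why both encodings above fetch the whole destination XMM
 * register before calling the worker.
 */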
3587
3588/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3589FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3590{
3591 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3593 if (IEM_IS_MODRM_REG_MODE(bRm))
3594 {
3595 /*
3596 * XMM, MMX
3597 */
3598 IEM_MC_BEGIN(0, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3600 IEM_MC_LOCAL(X86XMMREG, Dst);
3601 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3602 IEM_MC_ARG(uint64_t, u64Src, 1);
3603 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3605 IEM_MC_PREPARE_FPU_USAGE();
3606 IEM_MC_FPU_TO_MMX_MODE();
3607
3608 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3609
3610 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3611 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3612 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3613
3614 IEM_MC_ADVANCE_RIP_AND_FINISH();
3615 IEM_MC_END();
3616 }
3617 else
3618 {
3619 /*
3620 * XMM, [mem64]
3621 */
3622 IEM_MC_BEGIN(0, 0);
3623 IEM_MC_LOCAL(X86XMMREG, Dst);
3624 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
3625 IEM_MC_ARG(uint64_t, u64Src, 1);
3626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3627
3628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3630 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3631 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3632 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3633
3634 /* Doesn't cause a transition to MMX mode. */
3635 IEM_MC_PREPARE_SSE_USAGE();
3636
3637 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpi2pd_u128, pDst, u64Src);
3638 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3639 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3640
3641 IEM_MC_ADVANCE_RIP_AND_FINISH();
3642 IEM_MC_END();
3643 }
3644}
3645
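/*
 * Illustrative example (values made up): cvtpi2pd xmm0, mm1 with
 * mm1=0x00000001'fffffffe yields xmm0[63:0]=-2.0 and xmm0[127:64]=1.0.
 * The two doubles cover the whole destination register, so unlike
 * cvtpi2ps no destination fetch is needed here.
 */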
3646
3647/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3648FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3649{
3650 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3651
3652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3653 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3654 {
3655 if (IEM_IS_MODRM_REG_MODE(bRm))
3656 {
3657 /* XMM, greg64 */
3658 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3659 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3660 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3661 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3662
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3666
3667 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3668 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3669 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3670 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3671
3672 IEM_MC_ADVANCE_RIP_AND_FINISH();
3673 IEM_MC_END();
3674 }
3675 else
3676 {
3677 /* XMM, [mem64] */
3678 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3680 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3681 IEM_MC_LOCAL(int64_t, i64Src);
3682 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3683 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3684
3685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3687 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3688 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3689
3690 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3691 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i64, pr32Dst, pi64Src);
3692 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3693 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3694
3695 IEM_MC_ADVANCE_RIP_AND_FINISH();
3696 IEM_MC_END();
3697 }
3698 }
3699 else
3700 {
3701 if (IEM_IS_MODRM_REG_MODE(bRm))
3702 {
3703 /* XMM, greg32 */
3704 IEM_MC_BEGIN(0, 0);
3705 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3706 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3707 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3708
3709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3710 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3711 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3712
3713 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3714 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3715 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3716 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3717
3718 IEM_MC_ADVANCE_RIP_AND_FINISH();
3719 IEM_MC_END();
3720 }
3721 else
3722 {
3723 /* XMM, [mem32] */
3724 IEM_MC_BEGIN(0, 0);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3726 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3727 IEM_MC_LOCAL(int32_t, i32Src);
3728 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 0);
3729 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3734 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3735
3736 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3737 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2ss_r32_i32, pr32Dst, pi32Src);
3738 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3739 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3740
3741 IEM_MC_ADVANCE_RIP_AND_FINISH();
3742 IEM_MC_END();
3743 }
3744 }
3745}
3746
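/*
 * Rough sketch of what the cvtsi2ss workers above compute (rounding per
 * MXCSR.RC is handled inside the assembly helpers):
 *
 *      r32Dst = (float)*pi32Src;   // or (float)*pi64Src with REX.W
 *
 * Only bits 31:0 of the destination change; IEM_MC_STORE_XREG_R32 leaves
 * the upper 96 bits of the XMM register untouched.
 */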
3747
3748/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3749FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3750{
3751 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3752
3753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3754 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3755 {
3756 if (IEM_IS_MODRM_REG_MODE(bRm))
3757 {
3758 /* XMM, greg64 */
3759 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3760 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3761 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3762 IEM_MC_ARG(const int64_t *, pi64Src, 1);
3763
3764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3765 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3766 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3767
3768 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3769 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3770 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3771 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3772
3773 IEM_MC_ADVANCE_RIP_AND_FINISH();
3774 IEM_MC_END();
3775 }
3776 else
3777 {
3778 /* XMM, [mem64] */
3779 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3781 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3782 IEM_MC_LOCAL(int64_t, i64Src);
3783 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3784 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 1);
3785
3786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3788 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3789 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3790
3791 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3792 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i64, pr64Dst, pi64Src);
3793 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3794 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3795
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 IEM_MC_END();
3798 }
3799 }
3800 else
3801 {
3802 if (IEM_IS_MODRM_REG_MODE(bRm))
3803 {
3804 /* XMM, greg32 */
3805 IEM_MC_BEGIN(0, 0);
3806 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3807 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3808 IEM_MC_ARG(const int32_t *, pi32Src, 1);
3809
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3812 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3813
3814 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3815 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3816 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3817 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3818
3819 IEM_MC_ADVANCE_RIP_AND_FINISH();
3820 IEM_MC_END();
3821 }
3822 else
3823 {
3824 /* XMM, [mem32] */
3825 IEM_MC_BEGIN(0, 0);
3826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3827 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3828 IEM_MC_LOCAL(int32_t, i32Src);
3829 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 0);
3830 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 1);
3831
3832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3835 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
3836
3837 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3838 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsi2sd_r64_i32, pr64Dst, pi32Src);
3839 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3840 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3841
3842 IEM_MC_ADVANCE_RIP_AND_FINISH();
3843 IEM_MC_END();
3844 }
3845 }
3846}
3847
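/*
 * Note: the int32 -> double conversion above is always exact (a double has
 * a 53 bit mantissa), so only the REX.W form with a 64-bit source can
 * raise a precision exception, whereas the int32 -> float conversion in
 * cvtsi2ss can already be inexact for magnitudes above 2^24.
 */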
3848
3849/**
3850 * @opcode 0x2b
3851 * @opcodesub !11 mr/reg
3852 * @oppfx none
3853 * @opcpuid sse
3854 * @opgroup og_sse1_cachect
3855 * @opxcpttype 1
3856 * @optest op1=1 op2=2 -> op1=2
3857 * @optest op1=0 op2=-42 -> op1=-42
3858 */
3859FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3860{
3861 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3863 if (IEM_IS_MODRM_MEM_MODE(bRm))
3864 {
3865 /*
3866 * memory, register.
3867 */
3868 IEM_MC_BEGIN(0, 0);
3869 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3871
3872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3874 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3875 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3876
3877 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3878 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3879
3880 IEM_MC_ADVANCE_RIP_AND_FINISH();
3881 IEM_MC_END();
3882 }
3883 /* The register, register encoding is invalid. */
3884 else
3885 IEMOP_RAISE_INVALID_OPCODE_RET();
3886}
3887
3888/**
3889 * @opcode 0x2b
3890 * @opcodesub !11 mr/reg
3891 * @oppfx 0x66
3892 * @opcpuid sse2
3893 * @opgroup og_sse2_cachect
3894 * @opxcpttype 1
3895 * @optest op1=1 op2=2 -> op1=2
3896 * @optest op1=0 op2=-42 -> op1=-42
3897 */
3898FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3899{
3900 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3902 if (IEM_IS_MODRM_MEM_MODE(bRm))
3903 {
3904 /*
3905 * memory, register.
3906 */
3907 IEM_MC_BEGIN(0, 0);
3908 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3910
3911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3913 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3915
3916 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3917 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3918
3919 IEM_MC_ADVANCE_RIP_AND_FINISH();
3920 IEM_MC_END();
3921 }
3922 /* The register, register encoding is invalid. */
3923 else
3924 IEMOP_RAISE_INVALID_OPCODE_RET();
3925}
3926/* Opcode 0xf3 0x0f 0x2b - invalid */
3927/* Opcode 0xf2 0x0f 0x2b - invalid */
3928
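/*
 * Note: movntps/movntpd are non-temporal store hints.  The cache hint has
 * no architecturally visible effect to emulate, so the bodies above are
 * plain aligned 128-bit stores; only the invalid register forms and the
 * alignment check distinguish them from their movaps/movapd counterparts.
 */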
3929
3930/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3931FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
3932{
3933 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3935 if (IEM_IS_MODRM_REG_MODE(bRm))
3936 {
3937 /*
3938 * Register, register.
3939 */
3940 IEM_MC_BEGIN(0, 0);
3941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3942 IEM_MC_LOCAL(uint64_t, u64Dst);
3943 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3944 IEM_MC_ARG(uint64_t, u64Src, 1);
3945 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3946 IEM_MC_PREPARE_FPU_USAGE();
3947 IEM_MC_FPU_TO_MMX_MODE();
3948
3949 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
3950
3951 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3952 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3953 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3954
3955 IEM_MC_ADVANCE_RIP_AND_FINISH();
3956 IEM_MC_END();
3957 }
3958 else
3959 {
3960 /*
3961 * Register, memory.
3962 */
3963 IEM_MC_BEGIN(0, 0);
3964 IEM_MC_LOCAL(uint64_t, u64Dst);
3965 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
3966 IEM_MC_ARG(uint64_t, u64Src, 1);
3967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3968
3969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3971 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3972 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3973
3974 IEM_MC_PREPARE_FPU_USAGE();
3975 IEM_MC_FPU_TO_MMX_MODE();
3976
3977 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttps2pi_u128, pu64Dst, u64Src);
3978 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3979 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
3980
3981 IEM_MC_ADVANCE_RIP_AND_FINISH();
3982 IEM_MC_END();
3983 }
3984}
3985
3986
3987/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3988FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
3989{
3990 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3992 if (IEM_IS_MODRM_REG_MODE(bRm))
3993 {
3994 /*
3995 * Register, register.
3996 */
3997 IEM_MC_BEGIN(0, 0);
3998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3999 IEM_MC_LOCAL(uint64_t, u64Dst);
4000 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4001 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4002 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4003 IEM_MC_PREPARE_FPU_USAGE();
4004 IEM_MC_FPU_TO_MMX_MODE();
4005
4006 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4007
4008 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4009 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4010 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4011
4012 IEM_MC_ADVANCE_RIP_AND_FINISH();
4013 IEM_MC_END();
4014 }
4015 else
4016 {
4017 /*
4018 * Register, memory.
4019 */
4020 IEM_MC_BEGIN(0, 0);
4021 IEM_MC_LOCAL(uint64_t, u64Dst);
4022 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4023 IEM_MC_LOCAL(X86XMMREG, uSrc);
4024 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4026
4027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4030 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4031
4032 IEM_MC_PREPARE_FPU_USAGE();
4033 IEM_MC_FPU_TO_MMX_MODE();
4034
4035 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttpd2pi_u128, pu64Dst, pSrc);
4036 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4037 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4038
4039 IEM_MC_ADVANCE_RIP_AND_FINISH();
4040 IEM_MC_END();
4041 }
4042}
4043
4044
4045/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4046FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4047{
4048 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4049
4050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4051 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4052 {
4053 if (IEM_IS_MODRM_REG_MODE(bRm))
4054 {
4055 /* greg64, XMM */
4056 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4057 IEM_MC_LOCAL(int64_t, i64Dst);
4058 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4059 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4060
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4063 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4064
4065 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4066 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4067 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4068 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4069
4070 IEM_MC_ADVANCE_RIP_AND_FINISH();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 /* greg64, [mem32] */
4076 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4078 IEM_MC_LOCAL(int64_t, i64Dst);
4079 IEM_MC_LOCAL(uint32_t, u32Src);
4080 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4081 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4082
4083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4085 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4086 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4087
4088 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4089 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i64_r32, pi64Dst, pu32Src);
4090 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4091 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4092
4093 IEM_MC_ADVANCE_RIP_AND_FINISH();
4094 IEM_MC_END();
4095 }
4096 }
4097 else
4098 {
4099 if (IEM_IS_MODRM_REG_MODE(bRm))
4100 {
4101 /* greg32, XMM */
4102 IEM_MC_BEGIN(0, 0);
4103 IEM_MC_LOCAL(int32_t, i32Dst);
4104 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4105 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4106
4107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4108 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4109 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4110
4111 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4112 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4113 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4114 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4115
4116 IEM_MC_ADVANCE_RIP_AND_FINISH();
4117 IEM_MC_END();
4118 }
4119 else
4120 {
4121 /* greg32, [mem32] */
4122 IEM_MC_BEGIN(0, 0);
4123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4124 IEM_MC_LOCAL(int32_t, i32Dst);
4125 IEM_MC_LOCAL(uint32_t, u32Src);
4126 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4127 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4128
4129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4131 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4132 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4133
4134 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4135 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttss2si_i32_r32, pi32Dst, pu32Src);
4136 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4137 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4138
4139 IEM_MC_ADVANCE_RIP_AND_FINISH();
4140 IEM_MC_END();
4141 }
4142 }
4143}
4144
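/*
 * Illustrative example of the truncating ("cvtt") behaviour: with
 * xmm1[31:0] = 1.9f, cvttss2si eax, xmm1 yields eax=1 regardless of
 * MXCSR.RC, whereas cvtss2si (0x0f 0x2d below) would yield 2 under the
 * default round-to-nearest mode.
 */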
4145
4146/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4147FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4148{
4149 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4150
4151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4152 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4153 {
4154 if (IEM_IS_MODRM_REG_MODE(bRm))
4155 {
4156 /* greg64, XMM */
4157 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4158 IEM_MC_LOCAL(int64_t, i64Dst);
4159 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4160 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4161
4162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4163 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4164 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4165
4166 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4167 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4168 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4169 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4170
4171 IEM_MC_ADVANCE_RIP_AND_FINISH();
4172 IEM_MC_END();
4173 }
4174 else
4175 {
4176 /* greg64, [mem64] */
4177 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4179 IEM_MC_LOCAL(int64_t, i64Dst);
4180 IEM_MC_LOCAL(uint64_t, u64Src);
4181 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4182 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4183
4184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4186 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4187 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4188
4189 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4190 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i64_r64, pi64Dst, pu64Src);
4191 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4192 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4193
4194 IEM_MC_ADVANCE_RIP_AND_FINISH();
4195 IEM_MC_END();
4196 }
4197 }
4198 else
4199 {
4200 if (IEM_IS_MODRM_REG_MODE(bRm))
4201 {
4202 /* greg32, XMM */
4203 IEM_MC_BEGIN(0, 0);
4204 IEM_MC_LOCAL(int32_t, i32Dst);
4205 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4206 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4207
4208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4209 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4210 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4211
4212 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4213 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4214 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4215 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4216
4217 IEM_MC_ADVANCE_RIP_AND_FINISH();
4218 IEM_MC_END();
4219 }
4220 else
4221 {
4222 /* greg32, [mem64] */
4223 IEM_MC_BEGIN(0, 0);
4224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4225 IEM_MC_LOCAL(int32_t, i32Dst);
4226 IEM_MC_LOCAL(uint64_t, u64Src);
4227 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4228 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4229
4230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4232 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4233 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4234
4235 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4236 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvttsd2si_i32_r64, pi32Dst, pu64Src);
4237 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4238 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4239
4240 IEM_MC_ADVANCE_RIP_AND_FINISH();
4241 IEM_MC_END();
4242 }
4243 }
4244}
4245
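/*
 * Note: for NaN or out-of-range inputs the cvtt* conversions above return
 * the integer indefinite value (0x80000000 / 0x8000000000000000) and set
 * the #I flag in MXCSR; the IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT()
 * invocation after each worker call turns that into an exception when
 * unmasked.
 */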
4246
4247/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4248FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4249{
4250 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4252 if (IEM_IS_MODRM_REG_MODE(bRm))
4253 {
4254 /*
4255 * Register, register.
4256 */
4257 IEM_MC_BEGIN(0, 0);
4258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4259 IEM_MC_LOCAL(uint64_t, u64Dst);
4260 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4261 IEM_MC_ARG(uint64_t, u64Src, 1);
4262
4263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4264 IEM_MC_PREPARE_FPU_USAGE();
4265 IEM_MC_FPU_TO_MMX_MODE();
4266
4267 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4268
4269 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4271 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4272
4273 IEM_MC_ADVANCE_RIP_AND_FINISH();
4274 IEM_MC_END();
4275 }
4276 else
4277 {
4278 /*
4279 * Register, memory.
4280 */
4281 IEM_MC_BEGIN(0, 0);
4282 IEM_MC_LOCAL(uint64_t, u64Dst);
4283 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4284 IEM_MC_ARG(uint64_t, u64Src, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4286
4287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4290 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4291
4292 IEM_MC_PREPARE_FPU_USAGE();
4293 IEM_MC_FPU_TO_MMX_MODE();
4294
4295 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pi_u128, pu64Dst, u64Src);
4296 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4297 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4298
4299 IEM_MC_ADVANCE_RIP_AND_FINISH();
4300 IEM_MC_END();
4301 }
4302}
4303
4304
4305/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4306FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4307{
4308 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4310 if (IEM_IS_MODRM_REG_MODE(bRm))
4311 {
4312 /*
4313 * Register, register.
4314 */
4315 IEM_MC_BEGIN(0, 0);
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4317 IEM_MC_LOCAL(uint64_t, u64Dst);
4318 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4319 IEM_MC_ARG(PCX86XMMREG, pSrc, 1);
4320
4321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4322 IEM_MC_PREPARE_FPU_USAGE();
4323 IEM_MC_FPU_TO_MMX_MODE();
4324
4325 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4326
4327 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4328 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4329 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4330
4331 IEM_MC_ADVANCE_RIP_AND_FINISH();
4332 IEM_MC_END();
4333 }
4334 else
4335 {
4336 /*
4337 * Register, memory.
4338 */
4339 IEM_MC_BEGIN(0, 0);
4340 IEM_MC_LOCAL(uint64_t, u64Dst);
4341 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 0);
4342 IEM_MC_LOCAL(X86XMMREG, uSrc);
4343 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 1);
4344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4345
4346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4348 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4349 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4350
4351 IEM_MC_PREPARE_FPU_USAGE();
4352 IEM_MC_FPU_TO_MMX_MODE();
4353
4354 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtpd2pi_u128, pu64Dst, pSrc);
4355 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4356 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4357
4358 IEM_MC_ADVANCE_RIP_AND_FINISH();
4359 IEM_MC_END();
4360 }
4361}
4362
4363
4364/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4365FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4366{
4367 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4368
4369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4370 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4371 {
4372 if (IEM_IS_MODRM_REG_MODE(bRm))
4373 {
4374 /* greg64, XMM */
4375 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4376 IEM_MC_LOCAL(int64_t, i64Dst);
4377 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4378 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4379
4380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4381 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4382 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4383
4384 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4385 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4386 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4387 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4388
4389 IEM_MC_ADVANCE_RIP_AND_FINISH();
4390 IEM_MC_END();
4391 }
4392 else
4393 {
4394 /* greg64, [mem32] */
4395 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4397 IEM_MC_LOCAL(int64_t, i64Dst);
4398 IEM_MC_LOCAL(uint32_t, u32Src);
4399 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4400 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4401
4402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4404 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4405 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4406
4407 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4408 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i64_r32, pi64Dst, pu32Src);
4409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4410 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4411
4412 IEM_MC_ADVANCE_RIP_AND_FINISH();
4413 IEM_MC_END();
4414 }
4415 }
4416 else
4417 {
4418 if (IEM_IS_MODRM_REG_MODE(bRm))
4419 {
4420 /* greg32, XMM */
4421 IEM_MC_BEGIN(0, 0);
4422 IEM_MC_LOCAL(int32_t, i32Dst);
4423 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4424 IEM_MC_ARG(const uint32_t *, pu32Src, 1);
4425
4426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4427 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4428 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4429
4430 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4431 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4432 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4433 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4434
4435 IEM_MC_ADVANCE_RIP_AND_FINISH();
4436 IEM_MC_END();
4437 }
4438 else
4439 {
4440 /* greg32, [mem32] */
4441 IEM_MC_BEGIN(0, 0);
4442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4443 IEM_MC_LOCAL(int32_t, i32Dst);
4444 IEM_MC_LOCAL(uint32_t, u32Src);
4445 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4446 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 1);
4447
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4451 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4452
4453 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4454 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtss2si_i32_r32, pi32Dst, pu32Src);
4455 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4456 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4457
4458 IEM_MC_ADVANCE_RIP_AND_FINISH();
4459 IEM_MC_END();
4460 }
4461 }
4462}
4463
4464
4465/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4466FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4467{
4468 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4469
4470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4471 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4472 {
4473 if (IEM_IS_MODRM_REG_MODE(bRm))
4474 {
4475 /* greg64, XMM */
4476 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4477 IEM_MC_LOCAL(int64_t, i64Dst);
4478 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4479 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4480
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4483 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4484
4485 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4486 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4487 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4488 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4489
4490 IEM_MC_ADVANCE_RIP_AND_FINISH();
4491 IEM_MC_END();
4492 }
4493 else
4494 {
4495 /* greg64, [mem64] */
4496 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4498 IEM_MC_LOCAL(int64_t, i64Dst);
4499 IEM_MC_LOCAL(uint64_t, u64Src);
4500 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0);
4501 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4502
4503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4505 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4506 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4507
4508 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4509 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i64_r64, pi64Dst, pu64Src);
4510 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4511 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4512
4513 IEM_MC_ADVANCE_RIP_AND_FINISH();
4514 IEM_MC_END();
4515 }
4516 }
4517 else
4518 {
4519 if (IEM_IS_MODRM_REG_MODE(bRm))
4520 {
4521 /* greg32, XMM */
4522 IEM_MC_BEGIN(0, 0);
4523 IEM_MC_LOCAL(int32_t, i32Dst);
4524 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4525 IEM_MC_ARG(const uint64_t *, pu64Src, 1);
4526
4527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4528 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4529 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4530
4531 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4532 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4533 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4534 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4535
4536 IEM_MC_ADVANCE_RIP_AND_FINISH();
4537 IEM_MC_END();
4538 }
4539 else
4540 {
4541 /* greg32, [mem64] */
4542 IEM_MC_BEGIN(0, 0);
4543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4544 IEM_MC_LOCAL(int32_t, i32Dst);
4545 IEM_MC_LOCAL(uint64_t, u64Src);
4546 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0);
4547 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
4548
4549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4552 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_2() is calling this but the tstIEMCheckMc testcase depends on it. */
4553
4554 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4555 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtsd2si_i32_r64, pi32Dst, pu64Src);
4556 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4557 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4558
4559 IEM_MC_ADVANCE_RIP_AND_FINISH();
4560 IEM_MC_END();
4561 }
4562 }
4563}
4564
4565
4566/**
4567 * @opcode 0x2e
4568 * @oppfx none
4569 * @opflmodify cf,pf,af,zf,sf,of
4570 * @opflclear af,sf,of
4571 */
4572FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4573{
4574 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576 if (IEM_IS_MODRM_REG_MODE(bRm))
4577 {
4578 /*
4579 * Register, register.
4580 */
4581 IEM_MC_BEGIN(0, 0);
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4583 IEM_MC_LOCAL(uint32_t, fEFlags);
4584 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4585 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4586 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4588 IEM_MC_PREPARE_SSE_USAGE();
4589 IEM_MC_FETCH_EFLAGS(fEFlags);
4590 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4591 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4592 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4594 IEM_MC_COMMIT_EFLAGS(fEFlags);
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /*
4602 * Register, memory.
4603 */
4604 IEM_MC_BEGIN(0, 0);
4605 IEM_MC_LOCAL(uint32_t, fEFlags);
4606 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4607 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4608 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4610
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4614 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4615
4616 IEM_MC_PREPARE_SSE_USAGE();
4617 IEM_MC_FETCH_EFLAGS(fEFlags);
4618 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4619 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomiss_u128, pEFlags, uSrc1, uSrc2);
4620 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4621 IEM_MC_COMMIT_EFLAGS(fEFlags);
4622
4623 IEM_MC_ADVANCE_RIP_AND_FINISH();
4624 IEM_MC_END();
4625 }
4626}
4627
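/*
 * EFLAGS mapping produced by the (u)comiss/(u)comisd workers in this
 * group (the standard SSE compare-and-set-flags scheme):
 *
 *      unordered (NaN):  ZF=1 PF=1 CF=1
 *      src1 <  src2:     ZF=0 PF=0 CF=1
 *      src1 >  src2:     ZF=0 PF=0 CF=0
 *      src1 == src2:     ZF=1 PF=0 CF=0
 *
 * OF, SF and AF are cleared in all cases, as the opflclear notes say.
 */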
4628
4629/**
4630 * @opcode 0x2e
4631 * @oppfx 0x66
4632 * @opflmodify cf,pf,af,zf,sf,of
4633 * @opflclear af,sf,of
4634 */
4635FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4636{
4637 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4639 if (IEM_IS_MODRM_REG_MODE(bRm))
4640 {
4641 /*
4642 * Register, register.
4643 */
4644 IEM_MC_BEGIN(0, 0);
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4646 IEM_MC_LOCAL(uint32_t, fEFlags);
4647 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4648 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4649 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4651 IEM_MC_PREPARE_SSE_USAGE();
4652 IEM_MC_FETCH_EFLAGS(fEFlags);
4653 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4654 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4655 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4656 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4657 IEM_MC_COMMIT_EFLAGS(fEFlags);
4658
4659 IEM_MC_ADVANCE_RIP_AND_FINISH();
4660 IEM_MC_END();
4661 }
4662 else
4663 {
4664 /*
4665 * Register, memory.
4666 */
4667 IEM_MC_BEGIN(0, 0);
4668 IEM_MC_LOCAL(uint32_t, fEFlags);
4669 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4670 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4671 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4673
4674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4676 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4677 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4678
4679 IEM_MC_PREPARE_SSE_USAGE();
4680 IEM_MC_FETCH_EFLAGS(fEFlags);
4681 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4682 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_ucomisd_u128, pEFlags, uSrc1, uSrc2);
4683 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4684 IEM_MC_COMMIT_EFLAGS(fEFlags);
4685
4686 IEM_MC_ADVANCE_RIP_AND_FINISH();
4687 IEM_MC_END();
4688 }
4689}
4690
4691
4692/* Opcode 0xf3 0x0f 0x2e - invalid */
4693/* Opcode 0xf2 0x0f 0x2e - invalid */
4694
4695
4696/**
4697 * @opcode 0x2f
4698 * @oppfx none
4699 * @opflmodify cf,pf,af,zf,sf,of
4700 * @opflclear af,sf,of
4701 */
4702FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4703{
4704 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4706 if (IEM_IS_MODRM_REG_MODE(bRm))
4707 {
4708 /*
4709 * Register, register.
4710 */
4711 IEM_MC_BEGIN(0, 0);
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4713 IEM_MC_LOCAL(uint32_t, fEFlags);
4714 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4715 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4716 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4717 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4718 IEM_MC_PREPARE_SSE_USAGE();
4719 IEM_MC_FETCH_EFLAGS(fEFlags);
4720 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4721 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
4722 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4723 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4724 IEM_MC_COMMIT_EFLAGS(fEFlags);
4725
4726 IEM_MC_ADVANCE_RIP_AND_FINISH();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /*
4732 * Register, memory.
4733 */
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_LOCAL(uint32_t, fEFlags);
4736 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4737 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
4738 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4740
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4743 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4744 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4745
4746 IEM_MC_PREPARE_SSE_USAGE();
4747 IEM_MC_FETCH_EFLAGS(fEFlags);
4748 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
4749 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comiss_u128, pEFlags, uSrc1, uSrc2);
4750 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4751 IEM_MC_COMMIT_EFLAGS(fEFlags);
4752
4753 IEM_MC_ADVANCE_RIP_AND_FINISH();
4754 IEM_MC_END();
4755 }
4756}
4757
4758
4759/**
4760 * @opcode 0x2f
4761 * @oppfx 0x66
4762 * @opflmodify cf,pf,af,zf,sf,of
4763 * @opflclear af,sf,of
4764 */
4765FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
4766{
4767 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4769 if (IEM_IS_MODRM_REG_MODE(bRm))
4770 {
4771 /*
4772 * Register, register.
4773 */
4774 IEM_MC_BEGIN(0, 0);
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4776 IEM_MC_LOCAL(uint32_t, fEFlags);
4777 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4778 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4779 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4781 IEM_MC_PREPARE_SSE_USAGE();
4782 IEM_MC_FETCH_EFLAGS(fEFlags);
4783 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4784 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
4785 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4786 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4787 IEM_MC_COMMIT_EFLAGS(fEFlags);
4788
4789 IEM_MC_ADVANCE_RIP_AND_FINISH();
4790 IEM_MC_END();
4791 }
4792 else
4793 {
4794 /*
4795 * Register, memory.
4796 */
4797 IEM_MC_BEGIN(0, 0);
4798 IEM_MC_LOCAL(uint32_t, fEFlags);
4799 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
4800 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
4801 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
4802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4803
4804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4806 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4807 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4808
4809 IEM_MC_PREPARE_SSE_USAGE();
4810 IEM_MC_FETCH_EFLAGS(fEFlags);
4811 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4812 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_comisd_u128, pEFlags, uSrc1, uSrc2);
4813 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4814 IEM_MC_COMMIT_EFLAGS(fEFlags);
4815
4816 IEM_MC_ADVANCE_RIP_AND_FINISH();
4817 IEM_MC_END();
4818 }
4819}
4820
4821
4822/* Opcode 0xf3 0x0f 0x2f - invalid */
4823/* Opcode 0xf2 0x0f 0x2f - invalid */
4824
4825/** Opcode 0x0f 0x30. */
4826FNIEMOP_DEF(iemOp_wrmsr)
4827{
4828 IEMOP_MNEMONIC(wrmsr, "wrmsr");
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
4831}
4832
4833
4834/** Opcode 0x0f 0x31. */
4835FNIEMOP_DEF(iemOp_rdtsc)
4836{
4837 IEMOP_MNEMONIC(rdtsc, "rdtsc");
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4840 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4841 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4842 iemCImpl_rdtsc);
4843}
4844
4845
4846/** Opcode 0x0f 0x32. */
4847FNIEMOP_DEF(iemOp_rdmsr)
4848{
4849 IEMOP_MNEMONIC(rdmsr, "rdmsr");
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4852 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4853 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4854 iemCImpl_rdmsr);
4855}
4856
4857
4858/** Opcode 0x0f 0x33. */
4859FNIEMOP_DEF(iemOp_rdpmc)
4860{
4861 IEMOP_MNEMONIC(rdpmc, "rdpmc");
4862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4863 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
4864 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
4865 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
4866 iemCImpl_rdpmc);
4867}
4868
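/*
 * Note: rdtsc, rdmsr and rdpmc all return their result in EDX:EAX, which
 * is why the deferred CIMPL calls above list xAX and xDX as modified
 * guest registers for the native recompiler.
 */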
4869
4870/** Opcode 0x0f 0x34. */
4871FNIEMOP_DEF(iemOp_sysenter)
4872{
4873 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4875 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4876 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4877 iemCImpl_sysenter);
4878}
4879
4880/** Opcode 0x0f 0x35. */
4881FNIEMOP_DEF(iemOp_sysexit)
4882{
4883 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
4886 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
4887 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
4888}
4889
4890/** Opcode 0x0f 0x37. */
4891FNIEMOP_STUB(iemOp_getsec);
4892
4893
4894/** Opcode 0x0f 0x38. */
4895FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
4896{
4897#ifdef IEM_WITH_THREE_0F_38
4898 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4899 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4900#else
4901 IEMOP_BITCH_ABOUT_STUB();
4902 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4903#endif
4904}
4905
4906
4907/** Opcode 0x0f 0x3a. */
4908FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
4909{
4910#ifdef IEM_WITH_THREE_0F_3A
4911 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
4912 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
4913#else
4914 IEMOP_BITCH_ABOUT_STUB();
4915 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4916#endif
4917}
4918
4919
4920/**
4921 * Implements a conditional move.
4922 *
4923 * Wish there were an obvious way to do this that would let us share code
4924 * and reduce bloat.
4925 *
4926 * @param a_Cnd The conditional "microcode" operation.
4927 */
4928#define CMOV_X(a_Cnd) \
4929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4930 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4931 { \
4932 switch (pVCpu->iem.s.enmEffOpSize) \
4933 { \
4934 case IEMMODE_16BIT: \
4935 IEM_MC_BEGIN(0, 0); \
4936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4937 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4938 a_Cnd { \
4939 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4940 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4941 } IEM_MC_ENDIF(); \
4942 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4943 IEM_MC_END(); \
4944 break; \
4945 \
4946 case IEMMODE_32BIT: \
4947 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4949 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4950 a_Cnd { \
4951 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4952 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
4953 } IEM_MC_ELSE() { \
4954 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
4955 } IEM_MC_ENDIF(); \
4956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4957 IEM_MC_END(); \
4958 break; \
4959 \
4960 case IEMMODE_64BIT: \
4961 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
4962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4963 IEM_MC_LOCAL(uint64_t, u64Tmp); \
4964 a_Cnd { \
4965 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4966 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
4967 } IEM_MC_ENDIF(); \
4968 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4969 IEM_MC_END(); \
4970 break; \
4971 \
4972 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4973 } \
4974 } \
4975 else \
4976 { \
4977 switch (pVCpu->iem.s.enmEffOpSize) \
4978 { \
4979 case IEMMODE_16BIT: \
4980 IEM_MC_BEGIN(0, 0); \
4981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4982 IEM_MC_LOCAL(uint16_t, u16Tmp); \
4983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4985 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4986 a_Cnd { \
4987 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
4988 } IEM_MC_ENDIF(); \
4989 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4990 IEM_MC_END(); \
4991 break; \
4992 \
4993 case IEMMODE_32BIT: \
4994 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
4995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4996 IEM_MC_LOCAL(uint32_t, u32Tmp); \
4997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4999 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5000 a_Cnd { \
5001 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5002 } IEM_MC_ELSE() { \
5003 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5004 } IEM_MC_ENDIF(); \
5005 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5006 IEM_MC_END(); \
5007 break; \
5008 \
5009 case IEMMODE_64BIT: \
5010 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
5011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5012 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5015 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5016 a_Cnd { \
5017 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5018 } IEM_MC_ENDIF(); \
5019 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5020 IEM_MC_END(); \
5021 break; \
5022 \
5023 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5024 } \
5025 } do {} while (0)
5026
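/*
 * Usage sketch: each cmovcc decoder below expands CMOV_X with the matching
 * EFLAGS test, e.g.
 *
 *      CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));  // cmovo
 *
 * Note the IEM_MC_ELSE() branches in the 32-bit cases: a 32-bit cmov
 * zero-extends into the full 64-bit register even when the condition is
 * false, hence the IEM_MC_CLEAR_HIGH_GREG_U64 there.
 */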
5027
5028
5029/**
5030 * @opcode 0x40
5031 * @opfltest of
5032 */
5033FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5034{
5035 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5036 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5037}
5038
5039
5040/**
5041 * @opcode 0x41
5042 * @opfltest of
5043 */
5044FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5045{
5046 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5047 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5048}
5049
5050
5051/**
5052 * @opcode 0x42
5053 * @opfltest cf
5054 */
5055FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5056{
5057 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5058 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5059}
5060
5061
5062/**
5063 * @opcode 0x43
5064 * @opfltest cf
5065 */
5066FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5067{
5068 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5069 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5070}
5071
5072
5073/**
5074 * @opcode 0x44
5075 * @opfltest zf
5076 */
5077FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5078{
5079 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5080 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5081}
5082
5083
5084/**
5085 * @opcode 0x45
5086 * @opfltest zf
5087 */
5088FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5089{
5090 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5091 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5092}
5093
5094
5095/**
5096 * @opcode 0x46
5097 * @opfltest cf,zf
5098 */
5099FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5100{
5101 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5102 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5103}
5104
5105
5106/**
5107 * @opcode 0x47
5108 * @opfltest cf,zf
5109 */
5110FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5111{
5112 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5113 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5114}
5115
5116
5117/**
5118 * @opcode 0x48
5119 * @opfltest sf
5120 */
5121FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5122{
5123 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5124 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5125}
5126
5127
5128/**
5129 * @opcode 0x49
5130 * @opfltest sf
5131 */
5132FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5133{
5134 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5135 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5136}
5137
5138
5139/**
5140 * @opcode 0x4a
5141 * @opfltest pf
5142 */
5143FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5144{
5145 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5146 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5147}
5148
5149
5150/**
5151 * @opcode 0x4b
5152 * @opfltest pf
5153 */
5154FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5155{
5156 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5157 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5158}
5159
5160
5161/**
5162 * @opcode 0x4c
5163 * @opfltest sf,of
5164 */
5165FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5166{
5167 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5168 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5169}
5170
5171
5172/**
5173 * @opcode 0x4d
5174 * @opfltest sf,of
5175 */
5176FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5177{
5178 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5179 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5180}
5181
5182
5183/**
5184 * @opcode 0x4e
5185 * @opfltest zf,sf,of
5186 */
5187FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5188{
5189 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5190 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5191}
5192
5193
5194/**
5195 * @opcode 0x4f
5196 * @opfltest zf,sf,of
5197 */
5198FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5199{
5200 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5201 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5202}
5203
5204#undef CMOV_X
5205
5206/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5207FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5208{
5209 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5211 if (IEM_IS_MODRM_REG_MODE(bRm))
5212 {
5213 /*
5214 * Register, register.
5215 */
5216 IEM_MC_BEGIN(0, 0);
5217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5218 IEM_MC_LOCAL(uint8_t, u8Dst);
5219 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5220 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5221 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5222 IEM_MC_PREPARE_SSE_USAGE();
5223 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5224 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5225 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5226 IEM_MC_ADVANCE_RIP_AND_FINISH();
5227 IEM_MC_END();
5228 }
5229 /* No memory operand. */
5230 else
5231 IEMOP_RAISE_INVALID_OPCODE_RET();
5232}
5233
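/*
 * Illustrative example (values made up): with xmm1 = {-1.0f, 2.0f, -3.0f,
 * 4.0f}, movmskps eax, xmm1 gathers the four sign bits into eax[3:0]
 * (bit 0 = element 0), giving eax = 0b0101 = 5; the remaining destination
 * bits are zeroed.
 */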
5234
5235/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5236FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5237{
5238 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5240 if (IEM_IS_MODRM_REG_MODE(bRm))
5241 {
5242 /*
5243 * Register, register.
5244 */
5245 IEM_MC_BEGIN(0, 0);
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5247 IEM_MC_LOCAL(uint8_t, u8Dst);
5248 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5249 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5250 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5251 IEM_MC_PREPARE_SSE_USAGE();
5252 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5253 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5254 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5255 IEM_MC_ADVANCE_RIP_AND_FINISH();
5256 IEM_MC_END();
5257 }
5258 /* No memory operand. */
5259 else
5260 IEMOP_RAISE_INVALID_OPCODE_RET();
5262}
5263
5264
5265/* Opcode 0xf3 0x0f 0x50 - invalid */
5266/* Opcode 0xf2 0x0f 0x50 - invalid */
5267
5268
5269/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5270FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5271{
5272 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5273 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5274}
5275
5276
5277/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5278FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5279{
5280 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5281 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5282}
5283
5284
5285/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5286FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5287{
5288 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5289 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5290}
5291
5292
5293/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5294FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5295{
5296 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5297 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5298}
5299
5300
5301/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5302FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5303{
5304 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5305 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5306}
5307
5308
5309/* Opcode 0x66 0x0f 0x52 - invalid */
5310
5311
5312/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5313FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5314{
5315 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5316 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5317}
5318
5319
5320/* Opcode 0xf2 0x0f 0x52 - invalid */
5321
5322
5323/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5324FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5325{
5326 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5327 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5328}
5329
5330
5331/* Opcode 0x66 0x0f 0x53 - invalid */
5332
5333
5334/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5335FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5336{
5337 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5338 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5339}
5340
5341
5342/* Opcode 0xf2 0x0f 0x53 - invalid */
5343
5344
5345/** Opcode 0x0f 0x54 - andps Vps, Wps */
5346FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5347{
5348 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5349 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5350}
5351
5352
5353/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5354FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5355{
5356 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5357 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5358}
5359
5360
5361/* Opcode 0xf3 0x0f 0x54 - invalid */
5362/* Opcode 0xf2 0x0f 0x54 - invalid */
5363
5364
5365/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5366FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5367{
5368 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5369 return FNIEMOP_CALL_1(iemOpCommonSseOpt_FullFull_To_Full, iemAImpl_pandn_u128);
5370}
5371
5372
5373/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5374FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5375{
5376 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5377 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
5378}
5379
5380
5381/* Opcode 0xf3 0x0f 0x55 - invalid */
5382/* Opcode 0xf2 0x0f 0x55 - invalid */
5383
5384
5385/** Opcode 0x0f 0x56 - orps Vps, Wps */
5386FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5387{
5388 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5389 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5390}
5391
5392
5393/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5394FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5395{
5396 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5397 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5398}
5399
5400
5401/* Opcode 0xf3 0x0f 0x56 - invalid */
5402/* Opcode 0xf2 0x0f 0x56 - invalid */
5403
5404
5405/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5406FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5407{
5408 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5409 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5410}
5411
5412
5413/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5414FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5415{
5416 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5417 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5418}
5419
5420
5421/* Opcode 0xf3 0x0f 0x57 - invalid */
5422/* Opcode 0xf2 0x0f 0x57 - invalid */
5423
5424/** Opcode 0x0f 0x58 - addps Vps, Wps */
5425FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5426{
5427 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5428 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5429}
5430
5431
5432/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5433FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5434{
5435 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5436 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5437}
5438
5439
5440/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5441FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5442{
5443 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5444 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5445}
5446
5447
5448/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5449FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5450{
5451 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5452 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5453}
5454
5455
5456/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5457FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5458{
5459 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5460 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5461}
5462
5463
5464/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5465FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5466{
5467 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5468 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5469}
5470
5471
5472/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5473FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5474{
5475 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5476 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5477}
5478
5479
5480/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5481FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5482{
5483 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5484 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5485}
5486
5487
5488/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5489FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5490{
5491 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd_WO, Wps, DISOPTYPE_HARMLESS, 0);
5492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5493 if (IEM_IS_MODRM_REG_MODE(bRm))
5494 {
5495 /*
5496 * XMM, XMM[63:0].
5497 */
5498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5501 IEM_MC_PREPARE_SSE_USAGE();
5502
5503 IEM_MC_LOCAL(X86XMMREG, SseRes);
5504 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5505 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* The input is actually two 32-bit float values, */
5506 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); /* but we've got no matching type or MC. */
5507 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5508 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5509 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5510
5511 IEM_MC_ADVANCE_RIP_AND_FINISH();
5512 IEM_MC_END();
5513 }
5514 else
5515 {
5516 /*
5517 * XMM, [mem64].
5518 */
5519 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
5520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5523 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5524
5525 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5526 IEM_MC_ARG(uint64_t const *, pu64Src, 1); /* (see comment above wrt type) */
5527 IEM_MC_MEM_MAP_U64_RO(pu64Src, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5528
5529 IEM_MC_PREPARE_SSE_USAGE();
5530 IEM_MC_LOCAL(X86XMMREG, SseRes);
5531 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pSseRes, SseRes, 0);
5532 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_cvtps2pd_u128, pSseRes, pu64Src);
5533 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
5534
5535 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5536 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes);
5537
5538 IEM_MC_ADVANCE_RIP_AND_FINISH();
5539 IEM_MC_END();
5540 }
5541}
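
/* Illustrative only (hedged sketch): ignoring the MXCSR rounding and
 * exception handling the real iemAImpl_cvtps2pd_u128 worker must do, the
 * conversion just widens the two low singles to doubles:
 *      float af32[2];
 *      memcpy(af32, pu64Src, sizeof(af32));
 *      pSseRes->ar64[0] = (double)af32[0];
 *      pSseRes->ar64[1] = (double)af32[1];
 * (ar64 as a double view of X86XMMREG is an assumption here.) */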
5542
5543
5544/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5545FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5546{
5547 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps_WO, Wpd, DISOPTYPE_HARMLESS, 0);
5548 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5549 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5550}
5551
5552
5553/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5554FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5555{
5556 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5557 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5558}
5559
5560
5561/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5562FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5563{
5564 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5565 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5566}
5567
5568
5569/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5570FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5571{
5572 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5573 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5574 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5575}
5576
5577
5578/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5579FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5580{
5581 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5582 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5583 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5584}
5585
5586
5587/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5588FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5589{
5590 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5591 /** @todo inefficient as we don't need to fetch the destination (write-only). */
5592 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5593}
5594
5595
5596/* Opcode 0xf2 0x0f 0x5b - invalid */
5597
5598
5599/** Opcode 0x0f 0x5c - subps Vps, Wps */
5600FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5601{
5602 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5603 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5604}
5605
5606
5607/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5608FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5609{
5610 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5611 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5612}
5613
5614
5615/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5616FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5617{
5618 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5619 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5620}
5621
5622
5623/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5624FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5625{
5626 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5627 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5628}
5629
5630
5631/** Opcode 0x0f 0x5d - minps Vps, Wps */
5632FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5633{
5634 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5635 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5636}
5637
5638
5639/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5640FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5641{
5642 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5643 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5644}
5645
5646
5647/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5648FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5649{
5650 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5651 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5652}
5653
5654
5655/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5656FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5657{
5658 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5659 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5660}
5661
5662
5663/** Opcode 0x0f 0x5e - divps Vps, Wps */
5664FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5665{
5666 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5667 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5668}
5669
5670
5671/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5672FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5673{
5674 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5675 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5676}
5677
5678
5679/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5680FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5681{
5682 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5683 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5684}
5685
5686
5687/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5688FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5689{
5690 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5691 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5692}
5693
5694
5695/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5696FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5697{
5698 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5699 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5700}
5701
5702
5703/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5704FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5705{
5706 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5707 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5708}
5709
5710
5711/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5712FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5713{
5714 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5715 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5716}
5717
5718
5719/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5720FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5721{
5722 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5723 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5724}
5725
5726
5727/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5728FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5729{
5730 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5731 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5732}
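
/* Illustrative only -- a hedged sketch of the byte interleave the u64 worker
 * performs on the low four bytes of destination and source (dst byte first):
 *      uint64_t const uDst = *puDst, uSrc = *puSrc;
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *          uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16)
 *                   | ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);
 *      *puDst = uResult;
 * (puDst/puSrc argument names are assumed, not taken from the worker.) */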
5733
5734
5735/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5736FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5737{
5738 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5739 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5740}
5741
5742
5743/* Opcode 0xf3 0x0f 0x60 - invalid */
5744
5745
5746/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5747FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5748{
5749 /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
5750 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5751 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5752}
5753
5754
5755/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5756FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5757{
5758 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5760}
5761
5762
5763/* Opcode 0xf3 0x0f 0x61 - invalid */
5764
5765
5766/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5767FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5768{
5769 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5770 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5771}
5772
5773
5774/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5775FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5776{
5777 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5778 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5779}
5780
5781
5782/* Opcode 0xf3 0x0f 0x62 - invalid */
5783
5784
5785
5786/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5787FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5788{
5789 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5790 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5791}
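
/* Illustrative only (hedged): per 16-bit lane the pack applies a signed
 * saturation to byte range before narrowing, something like:
 *      int8_t const i8 = i16 > INT8_MAX ? INT8_MAX
 *                      : i16 < INT8_MIN ? INT8_MIN : (int8_t)i16;
 * The four destination words become result bytes 0..3 and the four source
 * words become result bytes 4..7. */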
5792
5793
5794/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5795FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5796{
5797 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5798 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5799}
5800
5801
5802/* Opcode 0xf3 0x0f 0x63 - invalid */
5803
5804
5805/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5806FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5807{
5808 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5809 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5810}
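
/* Illustrative only (hedged): each byte lane turns a signed greater-than
 * compare into an all-ones/all-zero mask, roughly:
 *      pbDst[i] = (int8_t)pbDst[i] > (int8_t)pbSrc[i] ? 0xff : 0x00;
 * (pbDst/pbSrc are hypothetical byte views of the two MMX operands.) */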
5811
5812
5813/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5814FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5815{
5816 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5817 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
5818}
5819
5820
5821/* Opcode 0xf3 0x0f 0x64 - invalid */
5822
5823
5824/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
5825FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
5826{
5827 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5828 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
5829}
5830
5831
5832/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
5833FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
5834{
5835 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5836 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
5837}
5838
5839
5840/* Opcode 0xf3 0x0f 0x65 - invalid */
5841
5842
5843/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
5844FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
5845{
5846 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5847 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
5848}
5849
5850
5851/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
5852FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
5853{
5854 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5855 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
5856}
5857
5858
5859/* Opcode 0xf3 0x0f 0x66 - invalid */
5860
5861
5862/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
5863FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
5864{
5865 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5866 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
5867}
5868
5869
5870/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
5871FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
5872{
5873 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5874 SSE2_OPT_BODY_FullFull_To_Full(packuswb, iemAImpl_packuswb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5875}
5876
5877
5878/* Opcode 0xf3 0x0f 0x67 - invalid */
5879
5880
5881/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
5882 * @note Intel and AMD both use Qd for the second parameter; however, they
5883 * both list it as an mmX/mem64 operand and Intel describes it as being
5884 * loaded as a qword, so it should be Qq, shouldn't it? */
5885FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
5886{
5887 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5888 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
5889}
5890
5891
5892/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
5893FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
5894{
5895 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5896 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
5897}
5898
5899
5900/* Opcode 0xf3 0x0f 0x68 - invalid */
5901
5902
5903/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
5904 * @note Intel and AMD both use Qd for the second parameter; however, they
5905 * both list it as an mmX/mem64 operand and Intel describes it as being
5906 * loaded as a qword, so it should be Qq, shouldn't it? */
5907FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
5908{
5909 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5910 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
5911}
5912
5913
5914/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
5915FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
5916{
5917 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5918 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
5920}
5921
5922
5923/* Opcode 0xf3 0x0f 0x69 - invalid */
5924
5925
5926/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
5927 * @note Intel and AMD both use Qd for the second parameter; however, they
5928 * both list it as an mmX/mem64 operand and Intel describes it as being
5929 * loaded as a qword, so it should be Qq, shouldn't it? */
5930FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
5931{
5932 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5933 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
5934}
5935
5936
5937/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
5938FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
5939{
5940 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5941 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
5942}
5943
5944
5945/* Opcode 0xf3 0x0f 0x6a - invalid */
5946
5947
5948/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
5949FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
5950{
5951 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5952 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
5953}
5954
5955
5956/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
5957FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
5958{
5959 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5960 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
5961}
5962
5963
5964/* Opcode 0xf3 0x0f 0x6b - invalid */
5965
5966
5967/* Opcode 0x0f 0x6c - invalid */
5968
5969
5970/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
5971FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
5972{
5973 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5974 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
5975}
5976
5977
5978/* Opcode 0xf3 0x0f 0x6c - invalid */
5979/* Opcode 0xf2 0x0f 0x6c - invalid */
5980
5981
5982/* Opcode 0x0f 0x6d - invalid */
5983
5984
5985/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
5986FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
5987{
5988 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5989 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
5990}
5991
5992
5993/* Opcode 0xf3 0x0f 0x6d - invalid */
5994
5995
5996FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
5997{
5998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5999 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6000 {
6001 /**
6002 * @opcode 0x6e
6003 * @opcodesub rex.w=1
6004 * @oppfx none
6005 * @opcpuid mmx
6006 * @opgroup og_mmx_datamove
6007 * @opxcpttype 5
6008 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6009 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6010 */
6011 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6012 if (IEM_IS_MODRM_REG_MODE(bRm))
6013 {
6014 /* MMX, greg64 */
6015 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6017 IEM_MC_LOCAL(uint64_t, u64Tmp);
6018
6019 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6020 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6021 IEM_MC_FPU_TO_MMX_MODE();
6022
6023 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6024 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6025
6026 IEM_MC_ADVANCE_RIP_AND_FINISH();
6027 IEM_MC_END();
6028 }
6029 else
6030 {
6031 /* MMX, [mem64] */
6032 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6034 IEM_MC_LOCAL(uint64_t, u64Tmp);
6035
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6038 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6039 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6040
6041 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6042 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6043 IEM_MC_FPU_TO_MMX_MODE();
6044
6045 IEM_MC_ADVANCE_RIP_AND_FINISH();
6046 IEM_MC_END();
6047 }
6048 }
6049 else
6050 {
6051 /**
6052 * @opdone
6053 * @opcode 0x6e
6054 * @opcodesub rex.w=0
6055 * @oppfx none
6056 * @opcpuid mmx
6057 * @opgroup og_mmx_datamove
6058 * @opxcpttype 5
6059 * @opfunction iemOp_movd_q_Pd_Ey
6060 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6061 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6062 */
6063 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6064 if (IEM_IS_MODRM_REG_MODE(bRm))
6065 {
6066 /* MMX, greg32 */
6067 IEM_MC_BEGIN(0, 0);
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6069 IEM_MC_LOCAL(uint32_t, u32Tmp);
6070
6071 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6072 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6073 IEM_MC_FPU_TO_MMX_MODE();
6074
6075 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6076 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6077
6078 IEM_MC_ADVANCE_RIP_AND_FINISH();
6079 IEM_MC_END();
6080 }
6081 else
6082 {
6083 /* MMX, [mem32] */
6084 IEM_MC_BEGIN(0, 0);
6085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6086 IEM_MC_LOCAL(uint32_t, u32Tmp);
6087
6088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6090 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6091 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6092
6093 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6094 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6095 IEM_MC_FPU_TO_MMX_MODE();
6096
6097 IEM_MC_ADVANCE_RIP_AND_FINISH();
6098 IEM_MC_END();
6099 }
6100 }
6101}
6102
6103FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6104{
6105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6106 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6107 {
6108 /**
6109 * @opcode 0x6e
6110 * @opcodesub rex.w=1
6111 * @oppfx 0x66
6112 * @opcpuid sse2
6113 * @opgroup og_sse2_simdint_datamove
6114 * @opxcpttype 5
6115 * @optest 64-bit / op1=1 op2=2 -> op1=2
6116 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6117 */
6118 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6119 if (IEM_IS_MODRM_REG_MODE(bRm))
6120 {
6121 /* XMM, greg64 */
6122 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6124 IEM_MC_LOCAL(uint64_t, u64Tmp);
6125
6126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6127 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6128
6129 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6130 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6131
6132 IEM_MC_ADVANCE_RIP_AND_FINISH();
6133 IEM_MC_END();
6134 }
6135 else
6136 {
6137 /* XMM, [mem64] */
6138 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6140 IEM_MC_LOCAL(uint64_t, u64Tmp);
6141
6142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6144 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6145 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6146
6147 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6148 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6149
6150 IEM_MC_ADVANCE_RIP_AND_FINISH();
6151 IEM_MC_END();
6152 }
6153 }
6154 else
6155 {
6156 /**
6157 * @opdone
6158 * @opcode 0x6e
6159 * @opcodesub rex.w=0
6160 * @oppfx 0x66
6161 * @opcpuid sse2
6162 * @opgroup og_sse2_simdint_datamove
6163 * @opxcpttype 5
6164 * @opfunction iemOp_movd_q_Vy_Ey
6165 * @optest op1=1 op2=2 -> op1=2
6166 * @optest op1=0 op2=-42 -> op1=-42
6167 */
6168 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6169 if (IEM_IS_MODRM_REG_MODE(bRm))
6170 {
6171 /* XMM, greg32 */
6172 IEM_MC_BEGIN(0, 0);
6173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6174 IEM_MC_LOCAL(uint32_t, u32Tmp);
6175
6176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6178
6179 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6180 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6181
6182 IEM_MC_ADVANCE_RIP_AND_FINISH();
6183 IEM_MC_END();
6184 }
6185 else
6186 {
6187 /* XMM, [mem32] */
6188 IEM_MC_BEGIN(0, 0);
6189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6190 IEM_MC_LOCAL(uint32_t, u32Tmp);
6191
6192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6195 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6196
6197 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6198 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6199
6200 IEM_MC_ADVANCE_RIP_AND_FINISH();
6201 IEM_MC_END();
6202 }
6203 }
6204}
6205
6206/* Opcode 0xf3 0x0f 0x6e - invalid */
6207
6208
6209/**
6210 * @opcode 0x6f
6211 * @oppfx none
6212 * @opcpuid mmx
6213 * @opgroup og_mmx_datamove
6214 * @opxcpttype 5
6215 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6216 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6217 */
6218FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6219{
6220 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6222 if (IEM_IS_MODRM_REG_MODE(bRm))
6223 {
6224 /*
6225 * Register, register.
6226 */
6227 IEM_MC_BEGIN(0, 0);
6228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6229 IEM_MC_LOCAL(uint64_t, u64Tmp);
6230
6231 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6232 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6233 IEM_MC_FPU_TO_MMX_MODE();
6234
6235 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6236 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6237
6238 IEM_MC_ADVANCE_RIP_AND_FINISH();
6239 IEM_MC_END();
6240 }
6241 else
6242 {
6243 /*
6244 * Register, memory.
6245 */
6246 IEM_MC_BEGIN(0, 0);
6247 IEM_MC_LOCAL(uint64_t, u64Tmp);
6248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6249
6250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6252 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6253 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6254
6255 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6256 IEM_MC_FPU_TO_MMX_MODE();
6257
6258 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6259
6260 IEM_MC_ADVANCE_RIP_AND_FINISH();
6261 IEM_MC_END();
6262 }
6263}
6264
6265/**
6266 * @opcode 0x6f
6267 * @oppfx 0x66
6268 * @opcpuid sse2
6269 * @opgroup og_sse2_simdint_datamove
6270 * @opxcpttype 1
6271 * @optest op1=1 op2=2 -> op1=2
6272 * @optest op1=0 op2=-42 -> op1=-42
6273 */
6274FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6275{
6276 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6278 if (IEM_IS_MODRM_REG_MODE(bRm))
6279 {
6280 /*
6281 * Register, register.
6282 */
6283 IEM_MC_BEGIN(0, 0);
6284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6285
6286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6287 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6288
6289 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6290 IEM_GET_MODRM_RM(pVCpu, bRm));
6291 IEM_MC_ADVANCE_RIP_AND_FINISH();
6292 IEM_MC_END();
6293 }
6294 else
6295 {
6296 /*
6297 * Register, memory.
6298 */
6299 IEM_MC_BEGIN(0, 0);
6300 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6302
6303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6305 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6306 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6307
6308 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6309 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6310
6311 IEM_MC_ADVANCE_RIP_AND_FINISH();
6312 IEM_MC_END();
6313 }
6314}
6315
6316/**
6317 * @opcode 0x6f
6318 * @oppfx 0xf3
6319 * @opcpuid sse2
6320 * @opgroup og_sse2_simdint_datamove
6321 * @opxcpttype 4UA
6322 * @optest op1=1 op2=2 -> op1=2
6323 * @optest op1=0 op2=-42 -> op1=-42
6324 */
6325FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6326{
6327 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6329 if (IEM_IS_MODRM_REG_MODE(bRm))
6330 {
6331 /*
6332 * Register, register.
6333 */
6334 IEM_MC_BEGIN(0, 0);
6335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6336 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6337 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6338 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6339 IEM_GET_MODRM_RM(pVCpu, bRm));
6340 IEM_MC_ADVANCE_RIP_AND_FINISH();
6341 IEM_MC_END();
6342 }
6343 else
6344 {
6345 /*
6346 * Register, memory.
6347 */
6348 IEM_MC_BEGIN(0, 0);
6349 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6351
6352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6354 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6355 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6356 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6357 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6358
6359 IEM_MC_ADVANCE_RIP_AND_FINISH();
6360 IEM_MC_END();
6361 }
6362}
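
/* Note (hedged): the only functional difference from the movdqa variant above
 * is the memory fetch -- IEM_MC_FETCH_MEM_U128_NO_AC skips the 16-byte
 * alignment check (#GP) that IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces. */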
6363
6364
6365/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6366FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6367{
6368 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6370 if (IEM_IS_MODRM_REG_MODE(bRm))
6371 {
6372 /*
6373 * Register, register.
6374 */
6375 IEM_MC_BEGIN(0, 0);
6376 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6378 IEM_MC_ARG(uint64_t *, pDst, 0);
6379 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6380 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6381 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6382 IEM_MC_PREPARE_FPU_USAGE();
6383 IEM_MC_FPU_TO_MMX_MODE();
6384
6385 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6386 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6387 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6388 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6389
6390 IEM_MC_ADVANCE_RIP_AND_FINISH();
6391 IEM_MC_END();
6392 }
6393 else
6394 {
6395 /*
6396 * Register, memory.
6397 */
6398 IEM_MC_BEGIN(0, 0);
6399 IEM_MC_ARG(uint64_t *, pDst, 0);
6400 IEM_MC_LOCAL(uint64_t, uSrc);
6401 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6403
6404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6405 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6406 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6408 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6409 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6410
6411 IEM_MC_PREPARE_FPU_USAGE();
6412 IEM_MC_FPU_TO_MMX_MODE();
6413
6414 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6415 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6416 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6417
6418 IEM_MC_ADVANCE_RIP_AND_FINISH();
6419 IEM_MC_END();
6420 }
6421}
6422
6423
6424/**
6425 * Common worker for SSE2 instructions on the forms:
6426 * pshufd xmm1, xmm2/mem128, imm8
6427 * pshufhw xmm1, xmm2/mem128, imm8
6428 * pshuflw xmm1, xmm2/mem128, imm8
6429 *
6430 * Proper alignment of the 128-bit operand is enforced.
6431 * Exceptions type 4. SSE2 cpuid checks.
6432 */
6433FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6434{
6435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6436 if (IEM_IS_MODRM_REG_MODE(bRm))
6437 {
6438 /*
6439 * Register, register.
6440 */
6441 IEM_MC_BEGIN(0, 0);
6442 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6444 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6445 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6446 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6448 IEM_MC_PREPARE_SSE_USAGE();
6449 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6450 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6451 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6452 IEM_MC_ADVANCE_RIP_AND_FINISH();
6453 IEM_MC_END();
6454 }
6455 else
6456 {
6457 /*
6458 * Register, memory.
6459 */
6460 IEM_MC_BEGIN(0, 0);
6461 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6462 IEM_MC_LOCAL(RTUINT128U, uSrc);
6463 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6465
6466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6467 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6468 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6470 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6471
6472 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6473 IEM_MC_PREPARE_SSE_USAGE();
6474 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6475 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6476
6477 IEM_MC_ADVANCE_RIP_AND_FINISH();
6478 IEM_MC_END();
6479 }
6480}
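
/* Illustrative only -- a hedged reference sketch of the pshufd semantics the
 * first of those workers implements (pshufhw/pshuflw only shuffle one half):
 *      RTUINT128U const uTmp = *puSrc;     // copy first, puDst may alias puSrc
 *      for (unsigned i = 0; i < 4; i++)
 *          puDst->au32[i] = uTmp.au32[(bImmArg >> (i * 2)) & 3];
 */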
6481
6482
6483/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6484FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6485{
6486 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6487 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6488}
6489
6490
6491/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6492FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6493{
6494 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6495 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6496}
6497
6498
6499/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6500FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6501{
6502 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6503 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6504}
6505
6506
6507/**
6508 * Common worker for MMX instructions of the form:
6509 * psrlw mm, imm8
6510 * psraw mm, imm8
6511 * psllw mm, imm8
6512 * psrld mm, imm8
6513 * psrad mm, imm8
6514 * pslld mm, imm8
6515 * psrlq mm, imm8
6516 * psllq mm, imm8
6517 *
6518 */
6519FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6520{
6521 if (IEM_IS_MODRM_REG_MODE(bRm))
6522 {
6523 /*
6524 * Register, immediate.
6525 */
6526 IEM_MC_BEGIN(0, 0);
6527 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6529 IEM_MC_ARG(uint64_t *, pDst, 0);
6530 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6531 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6532 IEM_MC_PREPARE_FPU_USAGE();
6533 IEM_MC_FPU_TO_MMX_MODE();
6534
6535 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6536 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6537 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6538
6539 IEM_MC_ADVANCE_RIP_AND_FINISH();
6540 IEM_MC_END();
6541 }
6542 else
6543 {
6544 /*
6545 * Register, memory not supported.
6546 */
6547 /// @todo Caller already enforced register mode?!
6548 AssertFailedReturn(VINF_SUCCESS);
6549 }
6550}
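
/* Illustrative only (hedged): for the psrlw case, each 16-bit lane of the
 * 64-bit register is shifted logically, with counts above 15 clearing all
 * lanes:
 *      uint64_t const uSrc = *pDst;
 *      uint64_t uResult = 0;
 *      if (bShiftArg <= 15)
 *          for (unsigned i = 0; i < 4; i++)
 *              uResult |= (uint64_t)((uint16_t)(uSrc >> (i * 16)) >> bShiftArg) << (i * 16);
 *      *pDst = uResult;
 */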
6551
6552
6553#if 0 /*unused*/
6554/**
6555 * Common worker for SSE2 instructions of the form:
6556 * psrlw xmm, imm8
6557 * psraw xmm, imm8
6558 * psllw xmm, imm8
6559 * psrld xmm, imm8
6560 * psrad xmm, imm8
6561 * pslld xmm, imm8
6562 * psrlq xmm, imm8
6563 * psllq xmm, imm8
6564 *
6565 */
6566FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6567{
6568 if (IEM_IS_MODRM_REG_MODE(bRm))
6569 {
6570 /*
6571 * Register, immediate.
6572 */
6573 IEM_MC_BEGIN(0, 0);
6574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6576 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6577 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6578 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6579 IEM_MC_PREPARE_SSE_USAGE();
6580 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6581 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6582 IEM_MC_ADVANCE_RIP_AND_FINISH();
6583 IEM_MC_END();
6584 }
6585 else
6586 {
6587 /*
6588 * Register, memory.
6589 */
6590 /// @todo Caller already enforced register mode?!
6591 AssertFailedReturn(VINF_SUCCESS);
6592 }
6593}
6594#endif
6595
6596
6597/**
6598 * Preprocessor macro variant of iemOpCommonSse2_Shift_Imm
6599 */
6600#define SSE2_SHIFT_BODY_Imm(a_Ins, a_bRm, a_fRegNativeArchs) \
6601 if (IEM_IS_MODRM_REG_MODE((a_bRm))) \
6602 { \
6603 /* \
6604 * Register, immediate. \
6605 */ \
6606 IEM_MC_BEGIN(0, 0); \
6607 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); \
6609 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
6610 IEM_MC_PREPARE_SSE_USAGE(); \
6611 IEM_MC_NATIVE_IF(a_fRegNativeArchs) { \
6612 IEM_MC_NATIVE_EMIT_2(RT_CONCAT3(iemNativeEmit_,a_Ins,_ri_u128), IEM_GET_MODRM_RM(pVCpu, (a_bRm)), bImm); \
6613 } IEM_MC_NATIVE_ELSE() { \
6614 IEM_MC_ARG(PRTUINT128U, pDst, 0); \
6615 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1); \
6616 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, (a_bRm))); \
6617 IEM_MC_CALL_VOID_AIMPL_2(RT_CONCAT3(iemAImpl_,a_Ins,_imm_u128), pDst, bShiftArg); \
6618 } IEM_MC_NATIVE_ENDIF(); \
6619 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6620 IEM_MC_END(); \
6621 } \
6622 else \
6623 { \
6624 /* \
6625 * Register, memory. \
6626 */ \
6627 AssertFailedReturn(VINF_SUCCESS); \
6628 } (void)0
6629
6630
6631/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6632FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6633{
6634// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6635 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6636}
6637
6638
6639/** Opcode 0x66 0x0f 0x71 11/2. */
6640FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6641{
6642// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6643 SSE2_SHIFT_BODY_Imm(psrlw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6644}
6645
6646
6647/** Opcode 0x0f 0x71 11/4. */
6648FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6649{
6650// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6651 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6652}
6653
6654
6655/** Opcode 0x66 0x0f 0x71 11/4. */
6656FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6657{
6658// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6659 SSE2_SHIFT_BODY_Imm(psraw, bRm, 0);
6660}
6661
6662
6663/** Opcode 0x0f 0x71 11/6. */
6664FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6665{
6666// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6667 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6668}
6669
6670
6671/** Opcode 0x66 0x0f 0x71 11/6. */
6672FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6673{
6674// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6675 SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6676}
6677
6678
6679/**
6680 * Group 12 jump table for register variant.
6681 */
6682IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6683{
6684 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6685 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6686 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6687 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6688 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6689 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6690 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6691 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6692};
6693AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
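
/* Hedged note: the dispatch below indexes this as /reg * 4 + idxPrefix, where
 * idxPrefix appears to encode the last SIMD prefix seen (0 = none, 1 = 0x66,
 * 2 = 0xf3, 3 = 0xf2), matching the four columns per row above. */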
6694
6695
6696/** Opcode 0x0f 0x71. */
6697FNIEMOP_DEF(iemOp_Grp12)
6698{
6699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6700 if (IEM_IS_MODRM_REG_MODE(bRm))
6701 /* register, register */
6702 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6703 + pVCpu->iem.s.idxPrefix], bRm);
6704 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6705}
6706
6707
6708/** Opcode 0x0f 0x72 11/2. */
6709FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6710{
6711// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6712 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6713}
6714
6715
6716/** Opcode 0x66 0x0f 0x72 11/2. */
6717FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6718{
6719// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6720 SSE2_SHIFT_BODY_Imm(psrld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6721}
6722
6723
6724/** Opcode 0x0f 0x72 11/4. */
6725FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6726{
6727// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6728 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6729}
6730
6731
6732/** Opcode 0x66 0x0f 0x72 11/4. */
6733FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6734{
6735// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6736 SSE2_SHIFT_BODY_Imm(psrad, bRm, 0);
6737}
6738
6739
6740/** Opcode 0x0f 0x72 11/6. */
6741FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6742{
6743// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6744 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6745}
6746
6747/** Opcode 0x66 0x0f 0x72 11/6. */
6748FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6749{
6750// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6751 SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6752}
6753
6754
6755/**
6756 * Group 13 jump table for register variant.
6757 */
6758IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6759{
6760 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6761 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6762 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6763 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6764 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6765 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6766 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6767 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6768};
6769AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6770
6771/** Opcode 0x0f 0x72. */
6772FNIEMOP_DEF(iemOp_Grp13)
6773{
6774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6775 if (IEM_IS_MODRM_REG_MODE(bRm))
6776 /* register, register */
6777 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6778 + pVCpu->iem.s.idxPrefix], bRm);
6779 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6780}
6781
6782
6783/** Opcode 0x0f 0x73 11/2. */
6784FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6785{
6786// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6787 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6788}
6789
6790
6791/** Opcode 0x66 0x0f 0x73 11/2. */
6792FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6793{
6794// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6795 SSE2_SHIFT_BODY_Imm(psrlq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6796}
6797
6798
6799/** Opcode 0x66 0x0f 0x73 11/3. */
6800FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6801{
6802// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6803 SSE2_SHIFT_BODY_Imm(psrldq, bRm, 0);
6804}
6805
6806
6807/** Opcode 0x0f 0x73 11/6. */
6808FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6809{
6810// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6811 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6812}
6813
6814
6815/** Opcode 0x66 0x0f 0x73 11/6. */
6816FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6817{
6818// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6819 SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6820}
6821
6822
6823/** Opcode 0x66 0x0f 0x73 11/7. */
6824FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6825{
6826// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6827 SSE2_SHIFT_BODY_Imm(pslldq, bRm, 0);
6828}
6829
6830/**
6831 * Group 14 jump table for register variant.
6832 */
6833IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6834{
6835 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6836 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6837 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6838 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6839 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6841 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6842 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843};
6844AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6845
6846
6847/** Opcode 0x0f 0x73. */
6848FNIEMOP_DEF(iemOp_Grp14)
6849{
6850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6851 if (IEM_IS_MODRM_REG_MODE(bRm))
6852 /* register, register */
6853 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6854 + pVCpu->iem.s.idxPrefix], bRm);
6855 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6856}
6857
6858
6859/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
6860FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
6861{
6862 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6863 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
6864}
6865
6866
6867/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
6868FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
6869{
6870 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6871 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqb, iemAImpl_pcmpeqb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6872}
6873
6874
6875/* Opcode 0xf3 0x0f 0x74 - invalid */
6876/* Opcode 0xf2 0x0f 0x74 - invalid */
6877
6878
6879/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
6880FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
6881{
6882 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6883 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
6884}
6885
6886
6887/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
6888FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
6889{
6890 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6891 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqw, iemAImpl_pcmpeqw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6892}
6893
6894
6895/* Opcode 0xf3 0x0f 0x75 - invalid */
6896/* Opcode 0xf2 0x0f 0x75 - invalid */
6897
6898
6899/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
6900FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
6901{
6902 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6903 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
6904}
6905
6906
6907/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
6908FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
6909{
6910 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6911 SSE2_OPT_BODY_FullFull_To_Full(pcmpeqd, iemAImpl_pcmpeqd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
6912}
6913
6914
6915/* Opcode 0xf3 0x0f 0x76 - invalid */
6916/* Opcode 0xf2 0x0f 0x76 - invalid */
6917
6918
6919/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
6920FNIEMOP_DEF(iemOp_emms)
6921{
6922 IEMOP_MNEMONIC(emms, "emms");
6923 IEM_MC_BEGIN(0, 0);
6924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6925 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6926 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6927 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6928 IEM_MC_FPU_FROM_MMX_MODE();
6929 IEM_MC_ADVANCE_RIP_AND_FINISH();
6930 IEM_MC_END();
6931}
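
/* Hedged note: IEM_MC_FPU_FROM_MMX_MODE models the architectural effect of
 * EMMS, i.e. tagging all x87 registers empty again so regular FPU code can
 * follow MMX code. */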
6932
6933/* Opcode 0x66 0x0f 0x77 - invalid */
6934/* Opcode 0xf3 0x0f 0x77 - invalid */
6935/* Opcode 0xf2 0x0f 0x77 - invalid */
6936
6937/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
6938#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6939FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
6940{
6941 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
6942 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
6943 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
6944 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
6945
6946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6947 if (IEM_IS_MODRM_REG_MODE(bRm))
6948 {
6949 /*
6950 * Register, register.
6951 */
6952 if (enmEffOpSize == IEMMODE_64BIT)
6953 {
6954 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6955 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6956 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6957 IEM_MC_ARG(uint64_t, u64Enc, 1);
6958 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6959 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6960 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6961 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6962 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
6963 IEM_MC_END();
6964 }
6965 else
6966 {
6967 IEM_MC_BEGIN(0, 0);
6968 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6969 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6970 IEM_MC_ARG(uint32_t, u32Enc, 1);
6971 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6972 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6973 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
6974 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
6975 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
6976 IEM_MC_END();
6977 }
6978 }
6979 else
6980 {
6981 /*
6982 * Memory, register.
6983 */
6984 if (enmEffOpSize == IEMMODE_64BIT)
6985 {
6986 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
6987 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
6988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
6989 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
6990 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
6991 IEM_MC_ARG(uint64_t, u64Enc, 2);
6992 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
6993 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
6994 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
6995 IEM_MC_END();
6996 }
6997 else
6998 {
6999 IEM_MC_BEGIN(0, 0);
7000 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7002 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7003 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7004 IEM_MC_ARG(uint32_t, u32Enc, 2);
7005 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7006 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7007 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7008 IEM_MC_END();
7009 }
7010 }
7011}
7012#else
7013FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7014#endif
7015
7016/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7017FNIEMOP_STUB(iemOp_AmdGrp17);
7018/* Opcode 0xf3 0x0f 0x78 - invalid */
7019/* Opcode 0xf2 0x0f 0x78 - invalid */
7020
7021/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7022#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7023FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7024{
7025 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7026 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7027 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7028 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
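 /* VMWRITE is the mirror image: the VMCS field encoding comes from the 'reg' operand, the value to write from r/m (register or memory). */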
7029
7030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7031 if (IEM_IS_MODRM_REG_MODE(bRm))
7032 {
7033 /*
7034 * Register, register.
7035 */
7036 if (enmEffOpSize == IEMMODE_64BIT)
7037 {
7038 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7039 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7040 IEM_MC_ARG(uint64_t, u64Val, 0);
7041 IEM_MC_ARG(uint64_t, u64Enc, 1);
7042 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7043 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7044 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7045 IEM_MC_END();
7046 }
7047 else
7048 {
7049 IEM_MC_BEGIN(0, 0);
7050 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7051 IEM_MC_ARG(uint32_t, u32Val, 0);
7052 IEM_MC_ARG(uint32_t, u32Enc, 1);
7053 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7054 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7055 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7056 IEM_MC_END();
7057 }
7058 }
7059 else
7060 {
7061 /*
7062 * Register, memory.
7063 */
7064 if (enmEffOpSize == IEMMODE_64BIT)
7065 {
7066 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7067 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7069 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7070 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7071 IEM_MC_ARG(uint64_t, u64Enc, 2);
7072 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7073 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7074 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7075 IEM_MC_END();
7076 }
7077 else
7078 {
7079 IEM_MC_BEGIN(0, 0);
7080 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7082 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7083 IEM_MC_ARG(uint32_t, u32Enc, 2);
7084 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7085 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7086 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7087 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7088 IEM_MC_END();
7089 }
7090 }
7091}
7092#else
7093FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7094#endif
7095/* Opcode 0x66 0x0f 0x79 - invalid */
7096/* Opcode 0xf3 0x0f 0x79 - invalid */
7097/* Opcode 0xf2 0x0f 0x79 - invalid */
7098
7099/* Opcode 0x0f 0x7a - invalid */
7100/* Opcode 0x66 0x0f 0x7a - invalid */
7101/* Opcode 0xf3 0x0f 0x7a - invalid */
7102/* Opcode 0xf2 0x0f 0x7a - invalid */
7103
7104/* Opcode 0x0f 0x7b - invalid */
7105/* Opcode 0x66 0x0f 0x7b - invalid */
7106/* Opcode 0xf3 0x0f 0x7b - invalid */
7107/* Opcode 0xf2 0x0f 0x7b - invalid */
7108
7109/* Opcode 0x0f 0x7c - invalid */
7110
7111
7112/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7113FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7114{
7115 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
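 /* Horizontal add: result[0] = dst[1] + dst[0]; result[1] = src[1] + src[0]. */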
7116 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7117}
7118
7119
7120/* Opcode 0xf3 0x0f 0x7c - invalid */
7121
7122
7123/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7124FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7125{
7126 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7127 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7128}
7129
7130
7131/* Opcode 0x0f 0x7d - invalid */
7132
7133
7134/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7135FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7136{
7137 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7138 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7139}
7140
7141
7142/* Opcode 0xf3 0x0f 0x7d - invalid */
7143
7144
7145/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7146FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7147{
7148 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7149 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7150}
7151
7152
7153/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7154FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7155{
7156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7157 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7158 {
7159 /**
7160 * @opcode 0x7e
7161 * @opcodesub rex.w=1
7162 * @oppfx none
7163 * @opcpuid mmx
7164 * @opgroup og_mmx_datamove
7165 * @opxcpttype 5
7166 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7167 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7168 */
7169 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7170 if (IEM_IS_MODRM_REG_MODE(bRm))
7171 {
7172 /* greg64, MMX */
7173 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7175 IEM_MC_LOCAL(uint64_t, u64Tmp);
7176
7177 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7178 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7179 IEM_MC_FPU_TO_MMX_MODE();
7180
7181 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7182 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7183
7184 IEM_MC_ADVANCE_RIP_AND_FINISH();
7185 IEM_MC_END();
7186 }
7187 else
7188 {
7189 /* [mem64], MMX */
7190 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7192 IEM_MC_LOCAL(uint64_t, u64Tmp);
7193
7194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7196 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7197 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7198
7199 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7200 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
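 /* Switch to MMX mode only after the store succeeded, so a faulting store leaves the FPU state untouched. */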
7201 IEM_MC_FPU_TO_MMX_MODE();
7202
7203 IEM_MC_ADVANCE_RIP_AND_FINISH();
7204 IEM_MC_END();
7205 }
7206 }
7207 else
7208 {
7209 /**
7210 * @opdone
7211 * @opcode 0x7e
7212 * @opcodesub rex.w=0
7213 * @oppfx none
7214 * @opcpuid mmx
7215 * @opgroup og_mmx_datamove
7216 * @opxcpttype 5
7217 * @opfunction iemOp_movd_q_Ey_Pd
7218 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7219 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7220 */
7221 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7222 if (IEM_IS_MODRM_REG_MODE(bRm))
7223 {
7224 /* greg32, MMX */
7225 IEM_MC_BEGIN(0, 0);
7226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7227 IEM_MC_LOCAL(uint32_t, u32Tmp);
7228
7229 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7230 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7231 IEM_MC_FPU_TO_MMX_MODE();
7232
7233 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7234 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7235
7236 IEM_MC_ADVANCE_RIP_AND_FINISH();
7237 IEM_MC_END();
7238 }
7239 else
7240 {
7241 /* [mem32], MMX */
7242 IEM_MC_BEGIN(0, 0);
7243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7244 IEM_MC_LOCAL(uint32_t, u32Tmp);
7245
7246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7248 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7249 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7250
7251 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm), 0);
7252 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7253 IEM_MC_FPU_TO_MMX_MODE();
7254
7255 IEM_MC_ADVANCE_RIP_AND_FINISH();
7256 IEM_MC_END();
7257 }
7258 }
7259}
7260
7261
7262FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7263{
7264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7265 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7266 {
7267 /**
7268 * @opcode 0x7e
7269 * @opcodesub rex.w=1
7270 * @oppfx 0x66
7271 * @opcpuid sse2
7272 * @opgroup og_sse2_simdint_datamove
7273 * @opxcpttype 5
7274 * @optest 64-bit / op1=1 op2=2 -> op1=2
7275 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7276 */
7277 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7278 if (IEM_IS_MODRM_REG_MODE(bRm))
7279 {
7280 /* greg64, XMM */
7281 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7283 IEM_MC_LOCAL(uint64_t, u64Tmp);
7284
7285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7286 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7287
7288 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7289 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7290
7291 IEM_MC_ADVANCE_RIP_AND_FINISH();
7292 IEM_MC_END();
7293 }
7294 else
7295 {
7296 /* [mem64], XMM */
7297 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
7298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7299 IEM_MC_LOCAL(uint64_t, u64Tmp);
7300
7301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7303 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7305
7306 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7307 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7308
7309 IEM_MC_ADVANCE_RIP_AND_FINISH();
7310 IEM_MC_END();
7311 }
7312 }
7313 else
7314 {
7315 /**
7316 * @opdone
7317 * @opcode 0x7e
7318 * @opcodesub rex.w=0
7319 * @oppfx 0x66
7320 * @opcpuid sse2
7321 * @opgroup og_sse2_simdint_datamove
7322 * @opxcpttype 5
7323 * @opfunction iemOp_movd_q_Ey_Vy
7324 * @optest op1=1 op2=2 -> op1=2
7325 * @optest op1=0 op2=-42 -> op1=-42
7326 */
7327 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7328 if (IEM_IS_MODRM_REG_MODE(bRm))
7329 {
7330 /* greg32, XMM */
7331 IEM_MC_BEGIN(0, 0);
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7333 IEM_MC_LOCAL(uint32_t, u32Tmp);
7334
7335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7337
7338 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7339 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7340
7341 IEM_MC_ADVANCE_RIP_AND_FINISH();
7342 IEM_MC_END();
7343 }
7344 else
7345 {
7346 /* [mem32], XMM */
7347 IEM_MC_BEGIN(0, 0);
7348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7349 IEM_MC_LOCAL(uint32_t, u32Tmp);
7350
7351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7353 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7355
7356 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7357 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7358
7359 IEM_MC_ADVANCE_RIP_AND_FINISH();
7360 IEM_MC_END();
7361 }
7362 }
7363}
7364
7365/**
7366 * @opcode 0x7e
7367 * @oppfx 0xf3
7368 * @opcpuid sse2
7369 * @opgroup og_sse2_pcksclr_datamove
7370 * @opxcpttype none
7371 * @optest op1=1 op2=2 -> op1=2
7372 * @optest op1=0 op2=-42 -> op1=-42
7373 */
7374FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7375{
7376 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
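 /* Unlike the MMX form, this variant zero-extends: bits 127:64 of the destination XMM register are cleared (hence VqZx). */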
7377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7378 if (IEM_IS_MODRM_REG_MODE(bRm))
7379 {
7380 /*
7381 * XMM128, XMM64.
7382 */
7383 IEM_MC_BEGIN(0, 0);
7384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7385 IEM_MC_LOCAL(uint64_t, uSrc);
7386
7387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7388 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7389
7390 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7391 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7392
7393 IEM_MC_ADVANCE_RIP_AND_FINISH();
7394 IEM_MC_END();
7395 }
7396 else
7397 {
7398 /*
7399 * XMM128, [mem64].
7400 */
7401 IEM_MC_BEGIN(0, 0);
7402 IEM_MC_LOCAL(uint64_t, uSrc);
7403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7404
7405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7408 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7409
7410 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7411 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7412
7413 IEM_MC_ADVANCE_RIP_AND_FINISH();
7414 IEM_MC_END();
7415 }
7416}
7417
7418/* Opcode 0xf2 0x0f 0x7e - invalid */
7419
7420
7421/** Opcode 0x0f 0x7f - movq Qq, Pq */
7422FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7423{
7424 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7426 if (IEM_IS_MODRM_REG_MODE(bRm))
7427 {
7428 /*
7429 * MMX, MMX.
7430 */
7431 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7432 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7433 IEM_MC_BEGIN(0, 0);
7434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7435 IEM_MC_LOCAL(uint64_t, u64Tmp);
7436 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7437 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7438 IEM_MC_FPU_TO_MMX_MODE();
7439
7440 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7441 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7442
7443 IEM_MC_ADVANCE_RIP_AND_FINISH();
7444 IEM_MC_END();
7445 }
7446 else
7447 {
7448 /*
7449 * [mem64], MMX.
7450 */
7451 IEM_MC_BEGIN(0, 0);
7452 IEM_MC_LOCAL(uint64_t, u64Tmp);
7453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7454
7455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7457 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7458 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7459
7460 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7461 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7462 IEM_MC_FPU_TO_MMX_MODE();
7463
7464 IEM_MC_ADVANCE_RIP_AND_FINISH();
7465 IEM_MC_END();
7466 }
7467}
7468
7469/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7470FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7471{
7472 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7474 if (IEM_IS_MODRM_REG_MODE(bRm))
7475 {
7476 /*
7477 * XMM, XMM.
7478 */
7479 IEM_MC_BEGIN(0, 0);
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7483 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7484 IEM_GET_MODRM_REG(pVCpu, bRm));
7485 IEM_MC_ADVANCE_RIP_AND_FINISH();
7486 IEM_MC_END();
7487 }
7488 else
7489 {
7490 /*
7491 * [mem128], XMM.
7492 */
7493 IEM_MC_BEGIN(0, 0);
7494 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7496
7497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7501
7502 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
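 /* movdqa requires 16-byte alignment; the _ALIGN_SSE store raises #GP(0) for misaligned addresses. */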
7503 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7504
7505 IEM_MC_ADVANCE_RIP_AND_FINISH();
7506 IEM_MC_END();
7507 }
7508}
7509
7510/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7511FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7512{
7513 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7515 if (IEM_IS_MODRM_REG_MODE(bRm))
7516 {
7517 /*
7518 * XMM, XMM.
7519 */
7520 IEM_MC_BEGIN(0, 0);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7524 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7525 IEM_GET_MODRM_REG(pVCpu, bRm));
7526 IEM_MC_ADVANCE_RIP_AND_FINISH();
7527 IEM_MC_END();
7528 }
7529 else
7530 {
7531 /*
7532 * [mem128], XMM.
7533 */
7534 IEM_MC_BEGIN(0, 0);
7535 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7537
7538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7540 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7542
7543 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
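 /* movdqu has no alignment requirement, so this store skips the alignment check. */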
7544 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7545
7546 IEM_MC_ADVANCE_RIP_AND_FINISH();
7547 IEM_MC_END();
7548 }
7549}
7550
7551/* Opcode 0xf2 0x0f 0x7f - invalid */
7552
7553
7554/**
7555 * @opcode 0x80
7556 * @opfltest of
7557 */
7558FNIEMOP_DEF(iemOp_jo_Jv)
7559{
7560 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7561 IEMOP_HLP_MIN_386();
7562 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
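 /* Jcc Jv: fetch a 16-bit or 32-bit signed displacement per the effective operand size and jump relative if the condition holds, otherwise just advance RIP. Opcodes 0x80..0x8f all follow this pattern. */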
7563 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7564 {
7565 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7566 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7569 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7570 } IEM_MC_ELSE() {
7571 IEM_MC_ADVANCE_RIP_AND_FINISH();
7572 } IEM_MC_ENDIF();
7573 IEM_MC_END();
7574 }
7575 else
7576 {
7577 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7578 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7581 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7582 } IEM_MC_ELSE() {
7583 IEM_MC_ADVANCE_RIP_AND_FINISH();
7584 } IEM_MC_ENDIF();
7585 IEM_MC_END();
7586 }
7587}
7588
7589
7590/**
7591 * @opcode 0x81
7592 * @opfltest of
7593 */
7594FNIEMOP_DEF(iemOp_jno_Jv)
7595{
7596 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7597 IEMOP_HLP_MIN_386();
7598 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7599 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7600 {
7601 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7602 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7605 IEM_MC_ADVANCE_RIP_AND_FINISH();
7606 } IEM_MC_ELSE() {
7607 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7608 } IEM_MC_ENDIF();
7609 IEM_MC_END();
7610 }
7611 else
7612 {
7613 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7614 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7617 IEM_MC_ADVANCE_RIP_AND_FINISH();
7618 } IEM_MC_ELSE() {
7619 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7620 } IEM_MC_ENDIF();
7621 IEM_MC_END();
7622 }
7623}
7624
7625
7626/**
7627 * @opcode 0x82
7628 * @opfltest cf
7629 */
7630FNIEMOP_DEF(iemOp_jc_Jv)
7631{
7632 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7633 IEMOP_HLP_MIN_386();
7634 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7635 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7636 {
7637 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7638 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7641 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7642 } IEM_MC_ELSE() {
7643 IEM_MC_ADVANCE_RIP_AND_FINISH();
7644 } IEM_MC_ENDIF();
7645 IEM_MC_END();
7646 }
7647 else
7648 {
7649 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7650 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7653 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7654 } IEM_MC_ELSE() {
7655 IEM_MC_ADVANCE_RIP_AND_FINISH();
7656 } IEM_MC_ENDIF();
7657 IEM_MC_END();
7658 }
7659}
7660
7661
7662/**
7663 * @opcode 0x83
7664 * @opfltest cf
7665 */
7666FNIEMOP_DEF(iemOp_jnc_Jv)
7667{
7668 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7669 IEMOP_HLP_MIN_386();
7670 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7671 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7672 {
7673 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7674 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7677 IEM_MC_ADVANCE_RIP_AND_FINISH();
7678 } IEM_MC_ELSE() {
7679 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7680 } IEM_MC_ENDIF();
7681 IEM_MC_END();
7682 }
7683 else
7684 {
7685 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7686 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7689 IEM_MC_ADVANCE_RIP_AND_FINISH();
7690 } IEM_MC_ELSE() {
7691 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7692 } IEM_MC_ENDIF();
7693 IEM_MC_END();
7694 }
7695}
7696
7697
7698/**
7699 * @opcode 0x84
7700 * @opfltest zf
7701 */
7702FNIEMOP_DEF(iemOp_je_Jv)
7703{
7704 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7705 IEMOP_HLP_MIN_386();
7706 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7707 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7708 {
7709 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7710 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7713 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7714 } IEM_MC_ELSE() {
7715 IEM_MC_ADVANCE_RIP_AND_FINISH();
7716 } IEM_MC_ENDIF();
7717 IEM_MC_END();
7718 }
7719 else
7720 {
7721 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7722 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7725 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7726 } IEM_MC_ELSE() {
7727 IEM_MC_ADVANCE_RIP_AND_FINISH();
7728 } IEM_MC_ENDIF();
7729 IEM_MC_END();
7730 }
7731}
7732
7733
7734/**
7735 * @opcode 0x85
7736 * @opfltest zf
7737 */
7738FNIEMOP_DEF(iemOp_jne_Jv)
7739{
7740 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7741 IEMOP_HLP_MIN_386();
7742 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7743 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7744 {
7745 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7746 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7749 IEM_MC_ADVANCE_RIP_AND_FINISH();
7750 } IEM_MC_ELSE() {
7751 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7752 } IEM_MC_ENDIF();
7753 IEM_MC_END();
7754 }
7755 else
7756 {
7757 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7758 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7761 IEM_MC_ADVANCE_RIP_AND_FINISH();
7762 } IEM_MC_ELSE() {
7763 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7764 } IEM_MC_ENDIF();
7765 IEM_MC_END();
7766 }
7767}
7768
7769
7770/**
7771 * @opcode 0x86
7772 * @opfltest cf,zf
7773 */
7774FNIEMOP_DEF(iemOp_jbe_Jv)
7775{
7776 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7777 IEMOP_HLP_MIN_386();
7778 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7779 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7780 {
7781 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7782 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7785 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7786 } IEM_MC_ELSE() {
7787 IEM_MC_ADVANCE_RIP_AND_FINISH();
7788 } IEM_MC_ENDIF();
7789 IEM_MC_END();
7790 }
7791 else
7792 {
7793 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7794 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7796 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7797 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7798 } IEM_MC_ELSE() {
7799 IEM_MC_ADVANCE_RIP_AND_FINISH();
7800 } IEM_MC_ENDIF();
7801 IEM_MC_END();
7802 }
7803}
7804
7805
7806/**
7807 * @opcode 0x87
7808 * @opfltest cf,zf
7809 */
7810FNIEMOP_DEF(iemOp_jnbe_Jv)
7811{
7812 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7813 IEMOP_HLP_MIN_386();
7814 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7815 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7816 {
7817 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7818 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7820 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7821 IEM_MC_ADVANCE_RIP_AND_FINISH();
7822 } IEM_MC_ELSE() {
7823 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7824 } IEM_MC_ENDIF();
7825 IEM_MC_END();
7826 }
7827 else
7828 {
7829 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7830 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7833 IEM_MC_ADVANCE_RIP_AND_FINISH();
7834 } IEM_MC_ELSE() {
7835 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7836 } IEM_MC_ENDIF();
7837 IEM_MC_END();
7838 }
7839}
7840
7841
7842/**
7843 * @opcode 0x88
7844 * @opfltest sf
7845 */
7846FNIEMOP_DEF(iemOp_js_Jv)
7847{
7848 IEMOP_MNEMONIC(js_Jv, "js Jv");
7849 IEMOP_HLP_MIN_386();
7850 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7851 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7852 {
7853 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7854 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7857 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7858 } IEM_MC_ELSE() {
7859 IEM_MC_ADVANCE_RIP_AND_FINISH();
7860 } IEM_MC_ENDIF();
7861 IEM_MC_END();
7862 }
7863 else
7864 {
7865 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7866 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7869 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7870 } IEM_MC_ELSE() {
7871 IEM_MC_ADVANCE_RIP_AND_FINISH();
7872 } IEM_MC_ENDIF();
7873 IEM_MC_END();
7874 }
7875}
7876
7877
7878/**
7879 * @opcode 0x89
7880 * @opfltest sf
7881 */
7882FNIEMOP_DEF(iemOp_jns_Jv)
7883{
7884 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
7885 IEMOP_HLP_MIN_386();
7886 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7887 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7888 {
7889 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7890 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7892 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7893 IEM_MC_ADVANCE_RIP_AND_FINISH();
7894 } IEM_MC_ELSE() {
7895 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7896 } IEM_MC_ENDIF();
7897 IEM_MC_END();
7898 }
7899 else
7900 {
7901 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7902 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7905 IEM_MC_ADVANCE_RIP_AND_FINISH();
7906 } IEM_MC_ELSE() {
7907 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7908 } IEM_MC_ENDIF();
7909 IEM_MC_END();
7910 }
7911}
7912
7913
7914/**
7915 * @opcode 0x8a
7916 * @opfltest pf
7917 */
7918FNIEMOP_DEF(iemOp_jp_Jv)
7919{
7920 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
7921 IEMOP_HLP_MIN_386();
7922 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7923 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7924 {
7925 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7926 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7928 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7929 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7930 } IEM_MC_ELSE() {
7931 IEM_MC_ADVANCE_RIP_AND_FINISH();
7932 } IEM_MC_ENDIF();
7933 IEM_MC_END();
7934 }
7935 else
7936 {
7937 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7938 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7941 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7942 } IEM_MC_ELSE() {
7943 IEM_MC_ADVANCE_RIP_AND_FINISH();
7944 } IEM_MC_ENDIF();
7945 IEM_MC_END();
7946 }
7947}
7948
7949
7950/**
7951 * @opcode 0x8b
7952 * @opfltest pf
7953 */
7954FNIEMOP_DEF(iemOp_jnp_Jv)
7955{
7956 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
7957 IEMOP_HLP_MIN_386();
7958 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7959 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7960 {
7961 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7962 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7964 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7965 IEM_MC_ADVANCE_RIP_AND_FINISH();
7966 } IEM_MC_ELSE() {
7967 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7968 } IEM_MC_ENDIF();
7969 IEM_MC_END();
7970 }
7971 else
7972 {
7973 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7974 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7977 IEM_MC_ADVANCE_RIP_AND_FINISH();
7978 } IEM_MC_ELSE() {
7979 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7980 } IEM_MC_ENDIF();
7981 IEM_MC_END();
7982 }
7983}
7984
7985
7986/**
7987 * @opcode 0x8c
7988 * @opfltest sf,of
7989 */
7990FNIEMOP_DEF(iemOp_jl_Jv)
7991{
7992 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
7993 IEMOP_HLP_MIN_386();
7994 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7995 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7996 {
7997 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
7998 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8000 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8001 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8002 } IEM_MC_ELSE() {
8003 IEM_MC_ADVANCE_RIP_AND_FINISH();
8004 } IEM_MC_ENDIF();
8005 IEM_MC_END();
8006 }
8007 else
8008 {
8009 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8010 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8013 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8014 } IEM_MC_ELSE() {
8015 IEM_MC_ADVANCE_RIP_AND_FINISH();
8016 } IEM_MC_ENDIF();
8017 IEM_MC_END();
8018 }
8019}
8020
8021
8022/**
8023 * @opcode 0x8d
8024 * @opfltest sf,of
8025 */
8026FNIEMOP_DEF(iemOp_jnl_Jv)
8027{
8028 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8029 IEMOP_HLP_MIN_386();
8030 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8031 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8032 {
8033 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8034 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8036 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8037 IEM_MC_ADVANCE_RIP_AND_FINISH();
8038 } IEM_MC_ELSE() {
8039 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8040 } IEM_MC_ENDIF();
8041 IEM_MC_END();
8042 }
8043 else
8044 {
8045 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8046 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8049 IEM_MC_ADVANCE_RIP_AND_FINISH();
8050 } IEM_MC_ELSE() {
8051 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8052 } IEM_MC_ENDIF();
8053 IEM_MC_END();
8054 }
8055}
8056
8057
8058/**
8059 * @opcode 0x8e
8060 * @opfltest zf,sf,of
8061 */
8062FNIEMOP_DEF(iemOp_jle_Jv)
8063{
8064 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8065 IEMOP_HLP_MIN_386();
8066 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8067 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8068 {
8069 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8070 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8073 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8074 } IEM_MC_ELSE() {
8075 IEM_MC_ADVANCE_RIP_AND_FINISH();
8076 } IEM_MC_ENDIF();
8077 IEM_MC_END();
8078 }
8079 else
8080 {
8081 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8082 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8085 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8086 } IEM_MC_ELSE() {
8087 IEM_MC_ADVANCE_RIP_AND_FINISH();
8088 } IEM_MC_ENDIF();
8089 IEM_MC_END();
8090 }
8091}
8092
8093
8094/**
8095 * @opcode 0x8f
8096 * @opfltest zf,sf,of
8097 */
8098FNIEMOP_DEF(iemOp_jnle_Jv)
8099{
8100 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8101 IEMOP_HLP_MIN_386();
8102 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8103 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8104 {
8105 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8106 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8108 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8109 IEM_MC_ADVANCE_RIP_AND_FINISH();
8110 } IEM_MC_ELSE() {
8111 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8112 } IEM_MC_ENDIF();
8113 IEM_MC_END();
8114 }
8115 else
8116 {
8117 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8118 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8121 IEM_MC_ADVANCE_RIP_AND_FINISH();
8122 } IEM_MC_ELSE() {
8123 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8124 } IEM_MC_ENDIF();
8125 IEM_MC_END();
8126 }
8127}
8128
8129
8130/**
8131 * @opcode 0x90
8132 * @opfltest of
8133 */
8134FNIEMOP_DEF(iemOp_seto_Eb)
8135{
8136 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8137 IEMOP_HLP_MIN_386();
8138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8139
8140 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8141 * any way. AMD says it's "unused", whatever that means. We're
8142 * ignoring it for now. */
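 /* SETcc stores 1 in the byte-sized destination when the condition is met and 0 otherwise; EFLAGS are only read. Opcodes 0x90..0x9f all follow this pattern. */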
8143 if (IEM_IS_MODRM_REG_MODE(bRm))
8144 {
8145 /* register target */
8146 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8148 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8149 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8150 } IEM_MC_ELSE() {
8151 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8152 } IEM_MC_ENDIF();
8153 IEM_MC_ADVANCE_RIP_AND_FINISH();
8154 IEM_MC_END();
8155 }
8156 else
8157 {
8158 /* memory target */
8159 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8163 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8164 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8165 } IEM_MC_ELSE() {
8166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8167 } IEM_MC_ENDIF();
8168 IEM_MC_ADVANCE_RIP_AND_FINISH();
8169 IEM_MC_END();
8170 }
8171}
8172
8173
8174/**
8175 * @opcode 0x91
8176 * @opfltest of
8177 */
8178FNIEMOP_DEF(iemOp_setno_Eb)
8179{
8180 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8181 IEMOP_HLP_MIN_386();
8182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8183
8184 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8185 * any way. AMD says it's "unused", whatever that means. We're
8186 * ignoring it for now. */
8187 if (IEM_IS_MODRM_REG_MODE(bRm))
8188 {
8189 /* register target */
8190 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8193 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8194 } IEM_MC_ELSE() {
8195 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8196 } IEM_MC_ENDIF();
8197 IEM_MC_ADVANCE_RIP_AND_FINISH();
8198 IEM_MC_END();
8199 }
8200 else
8201 {
8202 /* memory target */
8203 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8207 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8208 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8209 } IEM_MC_ELSE() {
8210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8211 } IEM_MC_ENDIF();
8212 IEM_MC_ADVANCE_RIP_AND_FINISH();
8213 IEM_MC_END();
8214 }
8215}
8216
8217
8218/**
8219 * @opcode 0x92
8220 * @opfltest cf
8221 */
8222FNIEMOP_DEF(iemOp_setc_Eb)
8223{
8224 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8225 IEMOP_HLP_MIN_386();
8226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8227
8228 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8229 * any way. AMD says it's "unused", whatever that means. We're
8230 * ignoring it for now. */
8231 if (IEM_IS_MODRM_REG_MODE(bRm))
8232 {
8233 /* register target */
8234 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8236 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8237 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8238 } IEM_MC_ELSE() {
8239 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8240 } IEM_MC_ENDIF();
8241 IEM_MC_ADVANCE_RIP_AND_FINISH();
8242 IEM_MC_END();
8243 }
8244 else
8245 {
8246 /* memory target */
8247 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8251 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8252 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8253 } IEM_MC_ELSE() {
8254 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8255 } IEM_MC_ENDIF();
8256 IEM_MC_ADVANCE_RIP_AND_FINISH();
8257 IEM_MC_END();
8258 }
8259}
8260
8261
8262/**
8263 * @opcode 0x93
8264 * @opfltest cf
8265 */
8266FNIEMOP_DEF(iemOp_setnc_Eb)
8267{
8268 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8269 IEMOP_HLP_MIN_386();
8270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8271
8272 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8273 * any way. AMD says it's "unused", whatever that means. We're
8274 * ignoring it for now. */
8275 if (IEM_IS_MODRM_REG_MODE(bRm))
8276 {
8277 /* register target */
8278 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8281 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8282 } IEM_MC_ELSE() {
8283 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8284 } IEM_MC_ENDIF();
8285 IEM_MC_ADVANCE_RIP_AND_FINISH();
8286 IEM_MC_END();
8287 }
8288 else
8289 {
8290 /* memory target */
8291 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8295 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8296 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8297 } IEM_MC_ELSE() {
8298 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8299 } IEM_MC_ENDIF();
8300 IEM_MC_ADVANCE_RIP_AND_FINISH();
8301 IEM_MC_END();
8302 }
8303}
8304
8305
8306/**
8307 * @opcode 0x94
8308 * @opfltest zf
8309 */
8310FNIEMOP_DEF(iemOp_sete_Eb)
8311{
8312 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8313 IEMOP_HLP_MIN_386();
8314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8315
8316 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8317 * any way. AMD says it's "unused", whatever that means. We're
8318 * ignoring it for now. */
8319 if (IEM_IS_MODRM_REG_MODE(bRm))
8320 {
8321 /* register target */
8322 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8324 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8325 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8326 } IEM_MC_ELSE() {
8327 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8328 } IEM_MC_ENDIF();
8329 IEM_MC_ADVANCE_RIP_AND_FINISH();
8330 IEM_MC_END();
8331 }
8332 else
8333 {
8334 /* memory target */
8335 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8340 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8341 } IEM_MC_ELSE() {
8342 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8343 } IEM_MC_ENDIF();
8344 IEM_MC_ADVANCE_RIP_AND_FINISH();
8345 IEM_MC_END();
8346 }
8347}
8348
8349
8350/**
8351 * @opcode 0x95
8352 * @opfltest zf
8353 */
8354FNIEMOP_DEF(iemOp_setne_Eb)
8355{
8356 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8357 IEMOP_HLP_MIN_386();
8358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8359
8360 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8361 * any way. AMD says it's "unused", whatever that means. We're
8362 * ignoring it for now. */
8363 if (IEM_IS_MODRM_REG_MODE(bRm))
8364 {
8365 /* register target */
8366 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8368 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8369 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8370 } IEM_MC_ELSE() {
8371 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8372 } IEM_MC_ENDIF();
8373 IEM_MC_ADVANCE_RIP_AND_FINISH();
8374 IEM_MC_END();
8375 }
8376 else
8377 {
8378 /* memory target */
8379 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8384 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8385 } IEM_MC_ELSE() {
8386 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8387 } IEM_MC_ENDIF();
8388 IEM_MC_ADVANCE_RIP_AND_FINISH();
8389 IEM_MC_END();
8390 }
8391}
8392
8393
8394/**
8395 * @opcode 0x96
8396 * @opfltest cf,zf
8397 */
8398FNIEMOP_DEF(iemOp_setbe_Eb)
8399{
8400 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8401 IEMOP_HLP_MIN_386();
8402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8403
8404 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8405 * any way. AMD says it's "unused", whatever that means. We're
8406 * ignoring it for now. */
8407 if (IEM_IS_MODRM_REG_MODE(bRm))
8408 {
8409 /* register target */
8410 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8413 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8414 } IEM_MC_ELSE() {
8415 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8416 } IEM_MC_ENDIF();
8417 IEM_MC_ADVANCE_RIP_AND_FINISH();
8418 IEM_MC_END();
8419 }
8420 else
8421 {
8422 /* memory target */
8423 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8428 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8429 } IEM_MC_ELSE() {
8430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8431 } IEM_MC_ENDIF();
8432 IEM_MC_ADVANCE_RIP_AND_FINISH();
8433 IEM_MC_END();
8434 }
8435}
8436
8437
8438/**
8439 * @opcode 0x97
8440 * @opfltest cf,zf
8441 */
8442FNIEMOP_DEF(iemOp_setnbe_Eb)
8443{
8444 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8445 IEMOP_HLP_MIN_386();
8446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8447
8448 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8449 * any way. AMD says it's "unused", whatever that means. We're
8450 * ignoring it for now. */
8451 if (IEM_IS_MODRM_REG_MODE(bRm))
8452 {
8453 /* register target */
8454 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8456 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8457 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8458 } IEM_MC_ELSE() {
8459 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8460 } IEM_MC_ENDIF();
8461 IEM_MC_ADVANCE_RIP_AND_FINISH();
8462 IEM_MC_END();
8463 }
8464 else
8465 {
8466 /* memory target */
8467 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8472 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8473 } IEM_MC_ELSE() {
8474 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8475 } IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP_AND_FINISH();
8477 IEM_MC_END();
8478 }
8479}
8480
8481
8482/**
8483 * @opcode 0x98
8484 * @opfltest sf
8485 */
8486FNIEMOP_DEF(iemOp_sets_Eb)
8487{
8488 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8489 IEMOP_HLP_MIN_386();
8490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8491
8492 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8493 * any way. AMD says it's "unused", whatever that means. We're
8494 * ignoring it for now. */
8495 if (IEM_IS_MODRM_REG_MODE(bRm))
8496 {
8497 /* register target */
8498 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8501 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8502 } IEM_MC_ELSE() {
8503 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8504 } IEM_MC_ENDIF();
8505 IEM_MC_ADVANCE_RIP_AND_FINISH();
8506 IEM_MC_END();
8507 }
8508 else
8509 {
8510 /* memory target */
8511 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8517 } IEM_MC_ELSE() {
8518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8519 } IEM_MC_ENDIF();
8520 IEM_MC_ADVANCE_RIP_AND_FINISH();
8521 IEM_MC_END();
8522 }
8523}
8524
8525
8526/**
8527 * @opcode 0x99
8528 * @opfltest sf
8529 */
8530FNIEMOP_DEF(iemOp_setns_Eb)
8531{
8532 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8533 IEMOP_HLP_MIN_386();
8534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8535
8536 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8537 * any way. AMD says it's "unused", whatever that means. We're
8538 * ignoring it for now. */
8539 if (IEM_IS_MODRM_REG_MODE(bRm))
8540 {
8541 /* register target */
8542 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8544 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8545 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8546 } IEM_MC_ELSE() {
8547 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8548 } IEM_MC_ENDIF();
8549 IEM_MC_ADVANCE_RIP_AND_FINISH();
8550 IEM_MC_END();
8551 }
8552 else
8553 {
8554 /* memory target */
8555 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8559 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8560 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8561 } IEM_MC_ELSE() {
8562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8563 } IEM_MC_ENDIF();
8564 IEM_MC_ADVANCE_RIP_AND_FINISH();
8565 IEM_MC_END();
8566 }
8567}
8568
8569
8570/**
8571 * @opcode 0x9a
8572 * @opfltest pf
8573 */
8574FNIEMOP_DEF(iemOp_setp_Eb)
8575{
8576 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8577 IEMOP_HLP_MIN_386();
8578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8579
8580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8581 * any way. AMD says it's "unused", whatever that means. We're
8582 * ignoring it for now. */
8583 if (IEM_IS_MODRM_REG_MODE(bRm))
8584 {
8585 /* register target */
8586 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8589 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8590 } IEM_MC_ELSE() {
8591 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8592 } IEM_MC_ENDIF();
8593 IEM_MC_ADVANCE_RIP_AND_FINISH();
8594 IEM_MC_END();
8595 }
8596 else
8597 {
8598 /* memory target */
8599 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8605 } IEM_MC_ELSE() {
8606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8607 } IEM_MC_ENDIF();
8608 IEM_MC_ADVANCE_RIP_AND_FINISH();
8609 IEM_MC_END();
8610 }
8611}
8612
8613
8614/**
8615 * @opcode 0x9b
8616 * @opfltest pf
8617 */
8618FNIEMOP_DEF(iemOp_setnp_Eb)
8619{
8620 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8621 IEMOP_HLP_MIN_386();
8622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8623
8624 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8625 * any way. AMD says it's "unused", whatever that means. We're
8626 * ignoring it for now. */
8627 if (IEM_IS_MODRM_REG_MODE(bRm))
8628 {
8629 /* register target */
8630 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8633 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8634 } IEM_MC_ELSE() {
8635 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8636 } IEM_MC_ENDIF();
8637 IEM_MC_ADVANCE_RIP_AND_FINISH();
8638 IEM_MC_END();
8639 }
8640 else
8641 {
8642 /* memory target */
8643 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8649 } IEM_MC_ELSE() {
8650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8651 } IEM_MC_ENDIF();
8652 IEM_MC_ADVANCE_RIP_AND_FINISH();
8653 IEM_MC_END();
8654 }
8655}
8656
8657
8658/**
8659 * @opcode 0x9c
8660 * @opfltest sf,of
8661 */
8662FNIEMOP_DEF(iemOp_setl_Eb)
8663{
8664 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8665 IEMOP_HLP_MIN_386();
8666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8667
8668 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8669 * any way. AMD says it's "unused", whatever that means. We're
8670 * ignoring it for now. */
8671 if (IEM_IS_MODRM_REG_MODE(bRm))
8672 {
8673 /* register target */
8674 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8676 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8677 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8678 } IEM_MC_ELSE() {
8679 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8680 } IEM_MC_ENDIF();
8681 IEM_MC_ADVANCE_RIP_AND_FINISH();
8682 IEM_MC_END();
8683 }
8684 else
8685 {
8686 /* memory target */
8687 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8693 } IEM_MC_ELSE() {
8694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8695 } IEM_MC_ENDIF();
8696 IEM_MC_ADVANCE_RIP_AND_FINISH();
8697 IEM_MC_END();
8698 }
8699}
8700
8701
8702/**
8703 * @opcode 0x9d
8704 * @opfltest sf,of
8705 */
8706FNIEMOP_DEF(iemOp_setnl_Eb)
8707{
8708 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8709 IEMOP_HLP_MIN_386();
8710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8711
8712 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8713 * any way. AMD says it's "unused", whatever that means. We're
8714 * ignoring it for now. */
8715 if (IEM_IS_MODRM_REG_MODE(bRm))
8716 {
8717 /* register target */
8718 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8721 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8722 } IEM_MC_ELSE() {
8723 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8724 } IEM_MC_ENDIF();
8725 IEM_MC_ADVANCE_RIP_AND_FINISH();
8726 IEM_MC_END();
8727 }
8728 else
8729 {
8730 /* memory target */
8731 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8735 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8737 } IEM_MC_ELSE() {
8738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8739 } IEM_MC_ENDIF();
8740 IEM_MC_ADVANCE_RIP_AND_FINISH();
8741 IEM_MC_END();
8742 }
8743}
8744
8745
8746/**
8747 * @opcode 0x9e
8748 * @opfltest zf,sf,of
8749 */
8750FNIEMOP_DEF(iemOp_setle_Eb)
8751{
8752 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8753 IEMOP_HLP_MIN_386();
8754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8755
8756 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8757 * any way. AMD says it's "unused", whatever that means. We're
8758 * ignoring it for now. */
8759 if (IEM_IS_MODRM_REG_MODE(bRm))
8760 {
8761 /* register target */
8762 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8765 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8766 } IEM_MC_ELSE() {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8768 } IEM_MC_ENDIF();
8769 IEM_MC_ADVANCE_RIP_AND_FINISH();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* memory target */
8775 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8781 } IEM_MC_ELSE() {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8783 } IEM_MC_ENDIF();
8784 IEM_MC_ADVANCE_RIP_AND_FINISH();
8785 IEM_MC_END();
8786 }
8787}
8788
8789
8790/**
8791 * @opcode 0x9f
8792 * @opfltest zf,sf,of
8793 */
8794FNIEMOP_DEF(iemOp_setnle_Eb)
8795{
8796 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8797 IEMOP_HLP_MIN_386();
8798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8799
8800 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8801 * any way. AMD says it's "unused", whatever that means. We're
8802 * ignoring it for now. */
8803 if (IEM_IS_MODRM_REG_MODE(bRm))
8804 {
8805 /* register target */
8806 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8808 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8809 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8810 } IEM_MC_ELSE() {
8811 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8812 } IEM_MC_ENDIF();
8813 IEM_MC_ADVANCE_RIP_AND_FINISH();
8814 IEM_MC_END();
8815 }
8816 else
8817 {
8818 /* memory target */
8819 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
8820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8823 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8824 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8825 } IEM_MC_ELSE() {
8826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8827 } IEM_MC_ENDIF();
8828 IEM_MC_ADVANCE_RIP_AND_FINISH();
8829 IEM_MC_END();
8830 }
8831}
8832
8833
8834/** Opcode 0x0f 0xa0. */
8835FNIEMOP_DEF(iemOp_push_fs)
8836{
8837 IEMOP_MNEMONIC(push_fs, "push fs");
8838 IEMOP_HLP_MIN_386();
8839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8840 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8841}
8842
8843
8844/** Opcode 0x0f 0xa1. */
8845FNIEMOP_DEF(iemOp_pop_fs)
8846{
8847 IEMOP_MNEMONIC(pop_fs, "pop fs");
8848 IEMOP_HLP_MIN_386();
8849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8850 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
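    /* The mask below (second argument) names the guest register shadows to
       flush for the native recompiler: RSP plus the FS selector, base, limit
       and attributes, which iemCImpl_pop_Sreg may modify. */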
8851 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8852 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8853 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8854 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8855 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8856 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8857 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8858}
8859
8860
8861/** Opcode 0x0f 0xa2. */
8862FNIEMOP_DEF(iemOp_cpuid)
8863{
8864 IEMOP_MNEMONIC(cpuid, "cpuid");
8865 IEMOP_HLP_MIN_486(); /* not all 486es. */
8866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8867 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8868 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8869 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8870 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8871 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8872 iemCImpl_cpuid);
8873}
8874
8875
8876/**
8877 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8878 * iemOp_bts_Ev_Gv.
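 *
 * For the memory forms the bit offset in Gv is treated as a signed value and
 * folded into the effective address so that the access hits the element
 * containing the bit: 16-bit operands add (offset >> 4) * 2 bytes, 32-bit
 * (offset >> 5) * 4, and 64-bit (offset >> 6) * 8, while the remaining low
 * 4/5/6 bits select the bit inside that element (see the SAR/SHL sequences).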
8879 */
8880
8881#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8884 \
8885 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8886 { \
8887 /* register destination. */ \
8888 switch (pVCpu->iem.s.enmEffOpSize) \
8889 { \
8890 case IEMMODE_16BIT: \
8891 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8893 \
8894 IEM_MC_ARG(uint16_t, u16Src, 2); \
8895 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8896 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8897 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8898 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8899 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8900 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8901 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8902 \
8903 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8904 IEM_MC_END(); \
8905 break; \
8906 \
8907 case IEMMODE_32BIT: \
8908 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8910 \
8911 IEM_MC_ARG(uint32_t, u32Src, 2); \
8912 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8913 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8914 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8915 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8916 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8917 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
8918 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8919 \
8920 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8921 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8922 IEM_MC_END(); \
8923 break; \
8924 \
8925 case IEMMODE_64BIT: \
8926 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
8927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8928 \
8929 IEM_MC_ARG(uint64_t, u64Src, 2); \
8930 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8931 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8932 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
8933 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8934 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8935 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
8936 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8937 \
8938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8939 IEM_MC_END(); \
8940 break; \
8941 \
8942 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8943 } \
8944 } \
8945 else \
8946 { \
8947 /* memory destination. */ \
8948 /** @todo test negative bit offsets! */ \
8949 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
8950 { \
8951 switch (pVCpu->iem.s.enmEffOpSize) \
8952 { \
8953 case IEMMODE_16BIT: \
8954 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8957 IEMOP_HLP_DONE_DECODING(); \
8958 \
8959 IEM_MC_ARG(uint16_t, u16Src, 2); \
8960 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8961 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8962 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8963 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8964 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
8965 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
8966 \
8967 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8968 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
8969 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8970 \
8971 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8972 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
8973 \
8974 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8975 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
8976 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8977 IEM_MC_END(); \
8978 break; \
8979 \
8980 case IEMMODE_32BIT: \
8981 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
8982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8984 IEMOP_HLP_DONE_DECODING(); \
8985 \
8986 IEM_MC_ARG(uint32_t, u32Src, 2); \
8987 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8988 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
8989 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
8990 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
8991 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
8992 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
8993 \
8994 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8995 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
8996 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8997 \
8998 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
8999 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9000 \
9001 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9002 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9003 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9004 IEM_MC_END(); \
9005 break; \
9006 \
9007 case IEMMODE_64BIT: \
9008 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9011 IEMOP_HLP_DONE_DECODING(); \
9012 \
9013 IEM_MC_ARG(uint64_t, u64Src, 2); \
9014 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9015 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9016 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9017 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9018 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9019 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9020 \
9021 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9022 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9023 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9024 \
9025 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9026 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9027 \
9028 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9029 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9030 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9031 IEM_MC_END(); \
9032 break; \
9033 \
9034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9035 } \
9036 } \
9037 else \
9038 { \
9039 (void)0
9040/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9041#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9042 switch (pVCpu->iem.s.enmEffOpSize) \
9043 { \
9044 case IEMMODE_16BIT: \
9045 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9048 IEMOP_HLP_DONE_DECODING(); \
9049 \
9050 IEM_MC_ARG(uint16_t, u16Src, 2); \
9051 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9052 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9053 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9054 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9055 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9056 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9057 \
9058 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9059 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
9060 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9061 \
9062 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9063 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
9064 \
9065 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9066 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9068 IEM_MC_END(); \
9069 break; \
9070 \
9071 case IEMMODE_32BIT: \
9072 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9075 IEMOP_HLP_DONE_DECODING(); \
9076 \
9077 IEM_MC_ARG(uint32_t, u32Src, 2); \
9078 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9079 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9080 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9081 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9082 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9083 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9084 \
9085 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9086 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
9087 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9088 \
9089 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9090 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
9091 \
9092 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9093 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9094 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9095 IEM_MC_END(); \
9096 break; \
9097 \
9098 case IEMMODE_64BIT: \
9099 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9102 IEMOP_HLP_DONE_DECODING(); \
9103 \
9104 IEM_MC_ARG(uint64_t, u64Src, 2); \
9105 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9106 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9107 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9108 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9109 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9110 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9111 \
9112 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9113 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
9114 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9115 \
9116 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9117 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
9118 \
9119 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9120 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9121 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9122 IEM_MC_END(); \
9123 break; \
9124 \
9125 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9126 } \
9127 } \
9128 } \
9129 (void)0
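/* Note: IEMOP_BODY_BIT_Ev_Gv_RW deliberately ends inside the non-locked
   'else' branch; instruction bodies must follow it immediately with
   IEMOP_BODY_BIT_Ev_Gv_LOCKED, which supplies the locked workers and closes
   the open blocks (see iemOp_bts_Ev_Gv and iemOp_btr_Ev_Gv for usage). */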
9130
9131/* Read-only version (bt). */
9132#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9134 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9135 \
9136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9137 { \
9138 /* register destination. */ \
9139 switch (pVCpu->iem.s.enmEffOpSize) \
9140 { \
9141 case IEMMODE_16BIT: \
9142 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9144 \
9145 IEM_MC_ARG(uint16_t, u16Src, 2); \
9146 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9147 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9148 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9149 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9150 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9151 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9152 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9153 \
9154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9155 IEM_MC_END(); \
9156 break; \
9157 \
9158 case IEMMODE_32BIT: \
9159 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9161 \
9162 IEM_MC_ARG(uint32_t, u32Src, 2); \
9163 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9164 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9165 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9166 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9167 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9168 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9169 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9170 \
9171 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9172 IEM_MC_END(); \
9173 break; \
9174 \
9175 case IEMMODE_64BIT: \
9176 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9178 \
9179 IEM_MC_ARG(uint64_t, u64Src, 2); \
9180 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9181 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9182 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9183 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9184 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9185 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9186 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9187 \
9188 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9189 IEM_MC_END(); \
9190 break; \
9191 \
9192 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9193 } \
9194 } \
9195 else \
9196 { \
9197 /* memory destination. */ \
9198 /** @todo test negative bit offsets! */ \
9199 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9200 { \
9201 switch (pVCpu->iem.s.enmEffOpSize) \
9202 { \
9203 case IEMMODE_16BIT: \
9204 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9207 IEMOP_HLP_DONE_DECODING(); \
9208 \
9209 IEM_MC_ARG(uint16_t, u16Src, 2); \
9210 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9211 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9212 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9213 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9214 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9215 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9216 \
9217 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9218 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
9219 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9220 \
9221 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9222 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
9223 \
9224 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9225 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9227 IEM_MC_END(); \
9228 break; \
9229 \
9230 case IEMMODE_32BIT: \
9231 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9234 IEMOP_HLP_DONE_DECODING(); \
9235 \
9236 IEM_MC_ARG(uint32_t, u32Src, 2); \
9237 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9238 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9239 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9240 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9241 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9242 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9243 \
9244 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
9245 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9246 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9247 \
9248 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9249 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
9250 \
9251 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9252 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9254 IEM_MC_END(); \
9255 break; \
9256 \
9257 case IEMMODE_64BIT: \
9258 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9261 IEMOP_HLP_DONE_DECODING(); \
9262 \
9263 IEM_MC_ARG(uint64_t, u64Src, 2); \
9264 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9265 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9266 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9267 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9268 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9269 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9270 \
9271 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9272 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
9273 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9274 \
9275 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
9276 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
9277 \
9278 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9279 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
9280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9281 IEM_MC_END(); \
9282 break; \
9283 \
9284 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9285 } \
9286 } \
9287 else \
9288 { \
9289 IEMOP_HLP_DONE_DECODING(); \
9290 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9291 } \
9292 } \
9293 (void)0
9294
9295
9296/**
9297 * @opcode 0xa3
9298 * @oppfx n/a
9299 * @opflclass bitmap
9300 */
9301FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9302{
9303 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9304 IEMOP_HLP_MIN_386();
9305 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9306}
9307
9308
9309/**
9310 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
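 *
 * @remarks In the memory forms the shift count immediate trails the ModR/M
 *          operand bytes, so IEM_MC_CALC_RM_EFF_ADDR is told about one
 *          trailing immediate byte (third argument = 1) and the Ib is only
 *          fetched after the effective address has been calculated.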
9311 */
9312#define IEMOP_BODY_SHLD_SHRD_Ib(a_pImplExpr) \
9313 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9314 \
9315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9316 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9317 \
9318 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9319 { \
9320 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9321 \
9322 switch (pVCpu->iem.s.enmEffOpSize) \
9323 { \
9324 case IEMMODE_16BIT: \
9325 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9327 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9328 IEM_MC_ARG(uint16_t, u16Src, 1); \
9329 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9330 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9331 \
9332 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9333 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9334 IEM_MC_REF_EFLAGS(pEFlags); \
9335 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9336 \
9337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9338 IEM_MC_END(); \
9339 break; \
9340 \
9341 case IEMMODE_32BIT: \
9342 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9344 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9345 IEM_MC_ARG(uint32_t, u32Src, 1); \
9346 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9347 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9348 \
9349 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9350 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9351 IEM_MC_REF_EFLAGS(pEFlags); \
9352 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9353 \
9354 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9355 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9356 IEM_MC_END(); \
9357 break; \
9358 \
9359 case IEMMODE_64BIT: \
9360 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9362 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9363 IEM_MC_ARG(uint64_t, u64Src, 1); \
9364 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9365 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9366 \
9367 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9368 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9369 IEM_MC_REF_EFLAGS(pEFlags); \
9370 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9371 \
9372 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9373 IEM_MC_END(); \
9374 break; \
9375 \
9376 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9377 } \
9378 } \
9379 else \
9380 { \
9381 switch (pVCpu->iem.s.enmEffOpSize) \
9382 { \
9383 case IEMMODE_16BIT: \
9384 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9387 \
9388 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9390 \
9391 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9392 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9393 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9394 \
9395 IEM_MC_ARG(uint16_t, u16Src, 1); \
9396 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9397 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9398 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9399 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9400 \
9401 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9402 IEM_MC_COMMIT_EFLAGS(EFlags); \
9403 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9404 IEM_MC_END(); \
9405 break; \
9406 \
9407 case IEMMODE_32BIT: \
9408 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9411 \
9412 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9414 \
9415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9416 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9417 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9418 \
9419 IEM_MC_ARG(uint32_t, u32Src, 1); \
9420 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9421 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9422 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9423 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9424 \
9425 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9426 IEM_MC_COMMIT_EFLAGS(EFlags); \
9427 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9428 IEM_MC_END(); \
9429 break; \
9430 \
9431 case IEMMODE_64BIT: \
9432 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9435 \
9436 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9438 \
9439 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9440 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9441 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9442 \
9443 IEM_MC_ARG(uint64_t, u64Src, 1); \
9444 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9445 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9446 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9447 \
9448 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9449 \
9450 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9451 IEM_MC_COMMIT_EFLAGS(EFlags); \
9452 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9453 IEM_MC_END(); \
9454 break; \
9455 \
9456 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9457 } \
9458 } (void)0
9459
9460
9461/**
9462 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9463 */
9464#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9465 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9466 \
9467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9468 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9469 \
9470 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9471 { \
9472 switch (pVCpu->iem.s.enmEffOpSize) \
9473 { \
9474 case IEMMODE_16BIT: \
9475 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9477 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9478 IEM_MC_ARG(uint16_t, u16Src, 1); \
9479 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9480 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9481 \
9482 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9483 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9484 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9485 IEM_MC_REF_EFLAGS(pEFlags); \
9486 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9487 \
9488 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9489 IEM_MC_END(); \
9490 break; \
9491 \
9492 case IEMMODE_32BIT: \
9493 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9495 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9496 IEM_MC_ARG(uint32_t, u32Src, 1); \
9497 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9498 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9499 \
9500 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9501 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9502 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9503 IEM_MC_REF_EFLAGS(pEFlags); \
9504 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9505 \
9506 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9507 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9508 IEM_MC_END(); \
9509 break; \
9510 \
9511 case IEMMODE_64BIT: \
9512 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9514 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9515 IEM_MC_ARG(uint64_t, u64Src, 1); \
9516 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9517 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9518 \
9519 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9520 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9521 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9522 IEM_MC_REF_EFLAGS(pEFlags); \
9523 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9524 \
9525 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9526 IEM_MC_END(); \
9527 break; \
9528 \
9529 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9530 } \
9531 } \
9532 else \
9533 { \
9534 switch (pVCpu->iem.s.enmEffOpSize) \
9535 { \
9536 case IEMMODE_16BIT: \
9537 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9538 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9539 IEM_MC_ARG(uint16_t, u16Src, 1); \
9540 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9542 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9543 \
9544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9546 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9547 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9548 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9549 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9550 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9551 \
9552 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9553 IEM_MC_COMMIT_EFLAGS(EFlags); \
9554 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9555 IEM_MC_END(); \
9556 break; \
9557 \
9558 case IEMMODE_32BIT: \
9559 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
9560 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9561 IEM_MC_ARG(uint32_t, u32Src, 1); \
9562 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9564 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9565 \
9566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9568 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9569 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9570 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9571 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9572 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9573 \
9574 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9575 IEM_MC_COMMIT_EFLAGS(EFlags); \
9576 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9577 IEM_MC_END(); \
9578 break; \
9579 \
9580 case IEMMODE_64BIT: \
9581 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
9582 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9583 IEM_MC_ARG(uint64_t, u64Src, 1); \
9584 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9586 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9587 \
9588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9590 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9591 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9592 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9593 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9594 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9595 \
9596 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9597 IEM_MC_COMMIT_EFLAGS(EFlags); \
9598 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9599 IEM_MC_END(); \
9600 break; \
9601 \
9602 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9603 } \
9604 } (void)0
9605
9606
9607/**
9608 * @opcode 0xa4
9609 * @opflclass shift_count
9610 */
9611FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9612{
9613 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9614 IEMOP_HLP_MIN_386();
9615 IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9616}
9617
9618
9619/**
9620 * @opcode 0xa5
9621 * @opflclass shift_count
9622 */
9623FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9624{
9625 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9626 IEMOP_HLP_MIN_386();
9627 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9628}
9629
9630
9631/** Opcode 0x0f 0xa8. */
9632FNIEMOP_DEF(iemOp_push_gs)
9633{
9634 IEMOP_MNEMONIC(push_gs, "push gs");
9635 IEMOP_HLP_MIN_386();
9636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9637 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9638}
9639
9640
9641/** Opcode 0x0f 0xa9. */
9642FNIEMOP_DEF(iemOp_pop_gs)
9643{
9644 IEMOP_MNEMONIC(pop_gs, "pop gs");
9645 IEMOP_HLP_MIN_386();
9646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9647 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9648 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9650 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9651 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9652 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9653 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9654 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9655}
9656
9657
9658/** Opcode 0x0f 0xaa. */
9659FNIEMOP_DEF(iemOp_rsm)
9660{
9661 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9662 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9664 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9665 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9666 iemCImpl_rsm);
9667}
9668
9669
9670
9671/**
9672 * @opcode 0xab
9673 * @oppfx n/a
9674 * @opflclass bitmap
9675 */
9676FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9677{
9678 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9679 IEMOP_HLP_MIN_386();
9680 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9681 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9682}
9683
9684
9685/**
9686 * @opcode 0xac
9687 * @opflclass shift_count
9688 */
9689FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9690{
9691 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9692 IEMOP_HLP_MIN_386();
9693 IEMOP_BODY_SHLD_SHRD_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9694}
9695
9696
9697/**
9698 * @opcode 0xad
9699 * @opflclass shift_count
9700 */
9701FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9702{
9703 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9704 IEMOP_HLP_MIN_386();
9705 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9706}
9707
9708
9709/** Opcode 0x0f 0xae mem/0. */
9710FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9711{
9712 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9713 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9714 IEMOP_RAISE_INVALID_OPCODE_RET();
9715
9716 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9717 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9720 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9721 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9722 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9723 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9724 IEM_MC_END();
9725}
9726
9727
9728/** Opcode 0x0f 0xae mem/1. */
9729FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9730{
9731 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9732 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9733 IEMOP_RAISE_INVALID_OPCODE_RET();
9734
9735 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9736 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9739 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9740 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9741 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9742 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9743 iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9744 IEM_MC_END();
9745}
9746
9747
9748/**
9749 * @opmaps grp15
9750 * @opcode !11/2
9751 * @oppfx none
9752 * @opcpuid sse
9753 * @opgroup og_sse_mxcsrsm
9754 * @opxcpttype 5
9755 * @optest op1=0 -> mxcsr=0
9756 * @optest op1=0x2083 -> mxcsr=0x2083
9757 * @optest op1=0xfffffffe -> value.xcpt=0xd
9758 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9759 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9760 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9761 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9762 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9763 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9764 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9765 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9766 */
9767FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9768{
9769 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9770 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9771 IEMOP_RAISE_INVALID_OPCODE_RET();
9772
9773 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9774 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9777 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9778 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9779 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9780 IEM_MC_END();
9781}
9782
9783
9784/**
9785 * @opmaps grp15
9786 * @opcode !11/3
9787 * @oppfx none
9788 * @opcpuid sse
9789 * @opgroup og_sse_mxcsrsm
9790 * @opxcpttype 5
9791 * @optest mxcsr=0 -> op1=0
9792 * @optest mxcsr=0x2083 -> op1=0x2083
9793 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9794 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9795 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9796 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9797 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9798 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9799 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9800 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9801 */
9802FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9803{
9804 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9805 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9806 IEMOP_RAISE_INVALID_OPCODE_RET();
9807
9808 IEM_MC_BEGIN(IEM_MC_F_MIN_PENTIUM_II, 0);
9809 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9812 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9813 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9814 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9815 IEM_MC_END();
9816}
9817
9818
9819/**
9820 * @opmaps grp15
9821 * @opcode !11/4
9822 * @oppfx none
9823 * @opcpuid xsave
9824 * @opgroup og_system
9825 * @opxcpttype none
9826 */
9827FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9828{
9829 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9830 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9831 IEMOP_RAISE_INVALID_OPCODE_RET();
9832
9833 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9834 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9837 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9838 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9839 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9840 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9841 IEM_MC_END();
9842}
9843
9844
9845/**
9846 * @opmaps grp15
9847 * @opcode !11/5
9848 * @oppfx none
9849 * @opcpuid xsave
9850 * @opgroup og_system
9851 * @opxcpttype none
9852 */
9853FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9854{
9855 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9856 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9857 IEMOP_RAISE_INVALID_OPCODE_RET();
9858
9859 IEM_MC_BEGIN(IEM_MC_F_MIN_CORE, 0);
9860 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9863 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9864 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9865 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9866 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
9867 iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9868 IEM_MC_END();
9869}
9870
9871/** Opcode 0x0f 0xae mem/6. */
9872FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9873
9874/**
9875 * @opmaps grp15
9876 * @opcode !11/7
9877 * @oppfx none
9878 * @opcpuid clfsh
9879 * @opgroup og_cachectl
9880 * @optest op1=1 ->
9881 */
9882FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9883{
9884 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9885 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9886 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9887
9888 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9889 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9892 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9893 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9894 IEM_MC_END();
9895}
9896
9897/**
9898 * @opmaps grp15
9899 * @opcode !11/7
9900 * @oppfx 0x66
9901 * @opcpuid clflushopt
9902 * @opgroup og_cachectl
9903 * @optest op1=1 ->
9904 */
9905FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9906{
9907 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9908 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9909 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9910
9911 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9912 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9915 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9916 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9917 IEM_MC_END();
9918}
9919
9920
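/* Note: the three memory fence workers below always use the native fence
   worker on ARM64 hosts; on other hosts the host's SSE2 support is checked
   at runtime and iemAImpl_alt_mem_fence is used as the fallback when the
   real fence instruction is not available. */
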
9921/** Opcode 0x0f 0xae 11b/5. */
9922FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9923{
9924 RT_NOREF_PV(bRm);
9925 IEMOP_MNEMONIC(lfence, "lfence");
9926 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9928#ifdef RT_ARCH_ARM64
9929 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9930#else
9931 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9932 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9933 else
9934 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9935#endif
9936 IEM_MC_ADVANCE_RIP_AND_FINISH();
9937 IEM_MC_END();
9938}
9939
9940
9941/** Opcode 0x0f 0xae 11b/6. */
9942FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9943{
9944 RT_NOREF_PV(bRm);
9945 IEMOP_MNEMONIC(mfence, "mfence");
9946 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9948#ifdef RT_ARCH_ARM64
9949 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9950#else
9951 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9952 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9953 else
9954 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9955#endif
9956 IEM_MC_ADVANCE_RIP_AND_FINISH();
9957 IEM_MC_END();
9958}
9959
9960
9961/** Opcode 0x0f 0xae 11b/7. */
9962FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9963{
9964 RT_NOREF_PV(bRm);
9965 IEMOP_MNEMONIC(sfence, "sfence");
9966 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9968#ifdef RT_ARCH_ARM64
9969 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9970#else
9971 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9972 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9973 else
9974 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9975#endif
9976 IEM_MC_ADVANCE_RIP_AND_FINISH();
9977 IEM_MC_END();
9978}
9979
9980
9981/** Opcode 0xf3 0x0f 0xae 11b/0. */
9982FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
9983{
9984 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
9985 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
9986 {
9987 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
9988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
9989 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
9990 IEM_MC_LOCAL(uint64_t, u64Dst);
9991 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
9992 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
9993 IEM_MC_ADVANCE_RIP_AND_FINISH();
9994 IEM_MC_END();
9995 }
9996 else
9997 {
9998 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
9999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10000 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10001 IEM_MC_LOCAL(uint32_t, u32Dst);
10002 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10003 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10004 IEM_MC_ADVANCE_RIP_AND_FINISH();
10005 IEM_MC_END();
10006 }
10007}
10008
10009
10010/** Opcode 0xf3 0x0f 0xae 11b/1. */
10011FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10012{
10013 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10014 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10015 {
10016 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10018 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10019 IEM_MC_LOCAL(uint64_t, u64Dst);
10020 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10021 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10022 IEM_MC_ADVANCE_RIP_AND_FINISH();
10023 IEM_MC_END();
10024 }
10025 else
10026 {
10027 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10029 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10030 IEM_MC_LOCAL(uint32_t, u32Dst);
10031 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10032 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10033 IEM_MC_ADVANCE_RIP_AND_FINISH();
10034 IEM_MC_END();
10035 }
10036}
10037
10038
10039/** Opcode 0xf3 0x0f 0xae 11b/2. */
10040FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10041{
10042 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10043 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10044 {
10045 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10047 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10048 IEM_MC_LOCAL(uint64_t, u64Dst);
10049 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10050 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10051 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10052 IEM_MC_ADVANCE_RIP_AND_FINISH();
10053 IEM_MC_END();
10054 }
10055 else
10056 {
10057 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10059 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10060 IEM_MC_LOCAL(uint32_t, u32Dst);
10061 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10062 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10063 IEM_MC_ADVANCE_RIP_AND_FINISH();
10064 IEM_MC_END();
10065 }
10066}
10067
10068
10069/** Opcode 0xf3 0x0f 0xae 11b/3. */
10070FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10071{
10072 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10073 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10074 {
10075 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10077 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10078 IEM_MC_LOCAL(uint64_t, u64Dst);
10079 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10080 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10081 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10082 IEM_MC_ADVANCE_RIP_AND_FINISH();
10083 IEM_MC_END();
10084 }
10085 else
10086 {
10087 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
10088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10089 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10090 IEM_MC_LOCAL(uint32_t, u32Dst);
10091 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10092 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10093 IEM_MC_ADVANCE_RIP_AND_FINISH();
10094 IEM_MC_END();
10095 }
10096}
10097
10098
10099/**
10100 * Group 15 jump table for register variant.
10101 */
10102IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10103{ /* pfx: none, 066h, 0f3h, 0f2h */
10104 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10105 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10106 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10107 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10108 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10109 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10110 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10111 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10112};
10113AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10114
10115
10116/**
10117 * Group 15 jump table for memory variant.
10118 */
10119IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10120{ /* pfx: none, 066h, 0f3h, 0f2h */
10121 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10122 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10123 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10124 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10125 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10126 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10127 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10128 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10129};
10130AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
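
/* Note: both group 15 tables are indexed by ModR/M.reg * 4 + the prefix
   index (none, 066h, 0f3h, 0f2h), matching the lookups in iemOp_Grp15. */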
10131
10132
10133/** Opcode 0x0f 0xae. */
10134FNIEMOP_DEF(iemOp_Grp15)
10135{
10136 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10138 if (IEM_IS_MODRM_REG_MODE(bRm))
10139 /* register, register */
10140 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10141 + pVCpu->iem.s.idxPrefix], bRm);
10142 /* memory, register */
10143 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10144 + pVCpu->iem.s.idxPrefix], bRm);
10145}
10146
10147
10148/**
10149 * @opcode 0xaf
10150 * @opflclass multiply
10151 */
10152FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10153{
10154 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10155 IEMOP_HLP_MIN_386();
10156 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10157 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10159 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10160}
10161
10162
10163/**
10164 * @opcode 0xb0
10165 * @opflclass arithmetic
10166 */
10167FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10168{
10169 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10170 IEMOP_HLP_MIN_486();
10171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10172
10173 if (IEM_IS_MODRM_REG_MODE(bRm))
10174 {
10175 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10177 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10178 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10179 IEM_MC_ARG(uint8_t, u8Src, 2);
10180 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10181
10182 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10183 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10184 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10185 IEM_MC_REF_EFLAGS(pEFlags);
10186 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10187
10188 IEM_MC_ADVANCE_RIP_AND_FINISH();
10189 IEM_MC_END();
10190 }
10191 else
10192 {
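/* The macro below is instantiated twice: with plain read-write memory
   mapping when no (honored) LOCK prefix is present, and with atomic
   mapping and commit otherwise. */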
10193#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10194 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10197 IEMOP_HLP_DONE_DECODING(); \
10198 \
10199 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10200 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10201 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10202 \
10203 IEM_MC_ARG(uint8_t, u8Src, 2); \
10204 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10205 \
10206 IEM_MC_LOCAL(uint8_t, u8Al); \
10207 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10208 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10209 \
10210 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10211 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10212 \
10213 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10214 IEM_MC_COMMIT_EFLAGS(EFlags); \
10215 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10216 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10217 IEM_MC_END()
10218
10219 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10220 {
10221 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10222 }
10223 else
10224 {
10225 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10226 }
10227 }
10228}
10229
10230/**
10231 * @opcode 0xb1
10232 * @opflclass arithmetic
10233 */
10234FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10235{
10236 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10237 IEMOP_HLP_MIN_486();
10238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10239
10240 if (IEM_IS_MODRM_REG_MODE(bRm))
10241 {
10242 switch (pVCpu->iem.s.enmEffOpSize)
10243 {
10244 case IEMMODE_16BIT:
10245 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10247 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10248 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10249 IEM_MC_ARG(uint16_t, u16Src, 2);
10250 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10251
10252 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10253 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10254 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10255 IEM_MC_REF_EFLAGS(pEFlags);
10256 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10257
10258 IEM_MC_ADVANCE_RIP_AND_FINISH();
10259 IEM_MC_END();
10260 break;
10261
10262 case IEMMODE_32BIT:
10263 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
10264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10265 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10266 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10267 IEM_MC_ARG(uint32_t, u32Src, 2);
10268 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10269
10270 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10271 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10272 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10273 IEM_MC_REF_EFLAGS(pEFlags);
10274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10275
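            /* A 32-bit cmpxchg zero-extends whichever register it wrote:
               the destination when ZF is set, (R)AX when it is clear. */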
10276 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10277 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10278 } IEM_MC_ELSE() {
10279 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10280 } IEM_MC_ENDIF();
10281
10282 IEM_MC_ADVANCE_RIP_AND_FINISH();
10283 IEM_MC_END();
10284 break;
10285
10286 case IEMMODE_64BIT:
10287 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10289 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10290 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10291 IEM_MC_ARG(uint64_t, u64Src, 2);
10292 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10293
10294 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10295 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10296 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10297 IEM_MC_REF_EFLAGS(pEFlags);
10298 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10299
10300 IEM_MC_ADVANCE_RIP_AND_FINISH();
10301 IEM_MC_END();
10302 break;
10303
10304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10305 }
10306 }
10307 else
10308 {
10309#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10310 do { \
10311 switch (pVCpu->iem.s.enmEffOpSize) \
10312 { \
10313 case IEMMODE_16BIT: \
10314 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10315 \
10316 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10319 IEMOP_HLP_DONE_DECODING(); \
10320 \
10321 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10322 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10323 \
10324 IEM_MC_ARG(uint16_t, u16Src, 2); \
10325 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10326 \
10327 IEM_MC_LOCAL(uint16_t, u16Ax); \
10328 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10329 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10330 \
10331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10332 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10333 \
10334 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10335 IEM_MC_COMMIT_EFLAGS(EFlags); \
10336 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10338 IEM_MC_END(); \
10339 break; \
10340 \
10341 case IEMMODE_32BIT: \
10342 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
10343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10345 IEMOP_HLP_DONE_DECODING(); \
10346 \
10347 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10348 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10349 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10350 \
10351 IEM_MC_ARG(uint32_t, u32Src, 2); \
10352 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10353 \
10354 IEM_MC_LOCAL(uint32_t, u32Eax); \
10355 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10356 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10357 \
10358 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10359 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10360 \
10361 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10362 IEM_MC_COMMIT_EFLAGS(EFlags); \
10363 \
10364 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10365 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10366 } IEM_MC_ENDIF(); \
10367 \
10368 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10369 IEM_MC_END(); \
10370 break; \
10371 \
10372 case IEMMODE_64BIT: \
10373 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10376 IEMOP_HLP_DONE_DECODING(); \
10377 \
10378 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10379 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10380 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10381 \
10382 IEM_MC_ARG(uint64_t, u64Src, 2); \
10383 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10384 \
10385 IEM_MC_LOCAL(uint64_t, u64Rax); \
10386 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10387 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10388 \
10389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10390 \
10391 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10392 \
10393 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10394 IEM_MC_COMMIT_EFLAGS(EFlags); \
10395 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10396 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10397 IEM_MC_END(); \
10398 break; \
10399 \
10400 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10401 } \
10402 } while (0)
10403
10404 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10405 {
10406 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10407 }
10408 else
10409 {
10410 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10411 }
10412 }
10413}
10414
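/*
 * Illustrative sketch only, not used by the decoder: the value flow the
 * iemAImpl_cmpxchg_u* workers above are expected to implement (flags other
 * than ZF omitted; the helper name is made up for this comment).  Note that
 * the 32-bit memory path above writes EAX back only when ZF is clear: on a
 * successful exchange the accumulator is architecturally untouched, so an
 * unconditional 32-bit write-back would wrongly zero bits 63:32 of RAX in
 * 64-bit mode.
 */
DECLINLINE(bool) iemSketchCmpXchgU32(uint32_t *puDst, uint32_t *puEax, uint32_t uSrc)
{
    if (*puDst == *puEax)
    {
        *puDst = uSrc;              /* match: destination <- source (ZF=1) */
        return true;
    }
    *puEax = *puDst;                /* mismatch: accumulator <- destination (ZF=0) */
    return false;
}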
10415
10416/** Opcode 0x0f 0xb2. */
10417FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10418{
10419 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10420 IEMOP_HLP_MIN_386();
10421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10422 if (IEM_IS_MODRM_REG_MODE(bRm))
10423 IEMOP_RAISE_INVALID_OPCODE_RET();
10424 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10425}
10426
10427
10428/**
10429 * @opcode 0xb3
10430 * @oppfx n/a
10431 * @opflclass bitmap
10432 */
10433FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10434{
10435 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10436 IEMOP_HLP_MIN_386();
10437 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10438 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10439}
10440
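/*
 * Illustrative sketch only, not used by the decoder: for the Ev,Gv forms of
 * the bit test family (the IEMOP_BODY_BIT_Ev_Gv_* bodies are defined earlier
 * in the file) a memory operand is addressed relative to the effective
 * address using the *signed* bit offset from the register, so the accessed
 * unit can lie below as well as above it (hence the negative-bit-offset
 * todos).  Assumes the SDM-documented addressing; the helper name is made up.
 */
DECLINLINE(void) iemSketchBtEaU32(RTGCPTR GCPtrEff, int32_t iBitOffset,
                                  RTGCPTR *pGCPtrUnit, uint32_t *piBitInUnit)
{
    /* Arithmetic shift = floor division: offset -1 hits bit 31 of the dword below. */
    *pGCPtrUnit  = GCPtrEff + (int64_t)(iBitOffset >> 5) * 4;  /* dword containing the bit */
    *piBitInUnit = (uint32_t)iBitOffset & 31;                  /* bit number inside it */
}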
10441
10442/** Opcode 0x0f 0xb4. */
10443FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10444{
10445 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10446 IEMOP_HLP_MIN_386();
10447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10448 if (IEM_IS_MODRM_REG_MODE(bRm))
10449 IEMOP_RAISE_INVALID_OPCODE_RET();
10450 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10451}
10452
10453
10454/** Opcode 0x0f 0xb5. */
10455FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10456{
10457 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10458 IEMOP_HLP_MIN_386();
10459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10460 if (IEM_IS_MODRM_REG_MODE(bRm))
10461 IEMOP_RAISE_INVALID_OPCODE_RET();
10462 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10463}
10464
10465
10466/** Opcode 0x0f 0xb6. */
10467FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10468{
10469 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10470 IEMOP_HLP_MIN_386();
10471
10472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10473
10474 /*
10475 * If rm is denoting a register, no more instruction bytes.
10476 */
10477 if (IEM_IS_MODRM_REG_MODE(bRm))
10478 {
10479 switch (pVCpu->iem.s.enmEffOpSize)
10480 {
10481 case IEMMODE_16BIT:
10482 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10484 IEM_MC_LOCAL(uint16_t, u16Value);
10485 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10486 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10487 IEM_MC_ADVANCE_RIP_AND_FINISH();
10488 IEM_MC_END();
10489 break;
10490
10491 case IEMMODE_32BIT:
10492 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10494 IEM_MC_LOCAL(uint32_t, u32Value);
10495 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10496 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10497 IEM_MC_ADVANCE_RIP_AND_FINISH();
10498 IEM_MC_END();
10499 break;
10500
10501 case IEMMODE_64BIT:
10502 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10504 IEM_MC_LOCAL(uint64_t, u64Value);
10505 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10506 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10507 IEM_MC_ADVANCE_RIP_AND_FINISH();
10508 IEM_MC_END();
10509 break;
10510
10511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10512 }
10513 }
10514 else
10515 {
10516 /*
10517 * We're loading a register from memory.
10518 */
10519 switch (pVCpu->iem.s.enmEffOpSize)
10520 {
10521 case IEMMODE_16BIT:
10522 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10523 IEM_MC_LOCAL(uint16_t, u16Value);
10524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10527 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10528 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530 IEM_MC_END();
10531 break;
10532
10533 case IEMMODE_32BIT:
10534 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10535 IEM_MC_LOCAL(uint32_t, u32Value);
10536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10539 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10540 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10541 IEM_MC_ADVANCE_RIP_AND_FINISH();
10542 IEM_MC_END();
10543 break;
10544
10545 case IEMMODE_64BIT:
10546 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10547 IEM_MC_LOCAL(uint64_t, u64Value);
10548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10551 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10552 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10553 IEM_MC_ADVANCE_RIP_AND_FINISH();
10554 IEM_MC_END();
10555 break;
10556
10557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10558 }
10559 }
10560}
10561
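/*
 * Illustrative sketch only, not used by the decoder: what the register form
 * of movzx Gv,Eb boils down to for a 32-bit effective operand size.  The
 * plain 32-bit register write is what implicitly clears bits 63:32 of the
 * destination in 64-bit mode (IEM_MC_STORE_GREG_U32 behaves the same way).
 * The helper name is made up.
 */
DECLINLINE(uint64_t) iemSketchMovzxU8ToU32(uint8_t uSrc)
{
    uint32_t const uValue = uSrc;   /* zero extend 8 -> 32 */
    return uValue;                  /* a 32-bit GPR write zeroes bits 63:32 */
}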
10562
10563/** Opcode 0x0f 0xb7. */
10564FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10565{
10566 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10567 IEMOP_HLP_MIN_386();
10568
10569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10570
10571 /** @todo Not entirely sure how the operand size prefix is handled here,
10572 * assuming that it will be ignored. Would be nice to have a few
10573 * tests for this. */
10574
10575 /** @todo There should be no difference in the behaviour whether REX.W is
10576 * present or not... */
10577
10578 /*
10579 * If rm is denoting a register, no more instruction bytes.
10580 */
10581 if (IEM_IS_MODRM_REG_MODE(bRm))
10582 {
10583 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10584 {
10585 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10587 IEM_MC_LOCAL(uint32_t, u32Value);
10588 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10589 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10590 IEM_MC_ADVANCE_RIP_AND_FINISH();
10591 IEM_MC_END();
10592 }
10593 else
10594 {
10595 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10597 IEM_MC_LOCAL(uint64_t, u64Value);
10598 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10599 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10600 IEM_MC_ADVANCE_RIP_AND_FINISH();
10601 IEM_MC_END();
10602 }
10603 }
10604 else
10605 {
10606 /*
10607 * We're loading a register from memory.
10608 */
10609 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10610 {
10611 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
10612 IEM_MC_LOCAL(uint32_t, u32Value);
10613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10616 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10617 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10618 IEM_MC_ADVANCE_RIP_AND_FINISH();
10619 IEM_MC_END();
10620 }
10621 else
10622 {
10623 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
10624 IEM_MC_LOCAL(uint64_t, u64Value);
10625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10628 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10629 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10630 IEM_MC_ADVANCE_RIP_AND_FINISH();
10631 IEM_MC_END();
10632 }
10633 }
10634}
10635
10636
10637/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10638FNIEMOP_UD_STUB(iemOp_jmpe);
10639
10640
10641/**
10642 * @opcode 0xb8
10643 * @oppfx 0xf3
10644 * @opflmodify cf,pf,af,zf,sf,of
10645 * @opflclear cf,pf,af,sf,of
10646 */
10647FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10648{
10649 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10650 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10651 return iemOp_InvalidNeedRM(pVCpu);
10652#ifndef TST_IEM_CHECK_MC
10653# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10654 static const IEMOPBINSIZES s_Native =
10655 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10656# endif
10657 static const IEMOPBINSIZES s_Fallback =
10658 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10659#endif
10660 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10662 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10663}
10664
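/*
 * Illustrative sketch only, not used by the decoder: a portable population
 * count of the kind the iemAImpl_popcnt_u*_fallback workers can be built
 * from when the host lacks a POPCNT instruction (the real fallbacks are
 * implemented elsewhere; this helper name is made up).
 */
DECLINLINE(uint32_t) iemSketchPopCntU32(uint32_t uSrc)
{
    uSrc = uSrc - ((uSrc >> 1) & UINT32_C(0x55555555));                          /* 2-bit partial sums */
    uSrc = (uSrc & UINT32_C(0x33333333)) + ((uSrc >> 2) & UINT32_C(0x33333333)); /* 4-bit partial sums */
    uSrc = (uSrc + (uSrc >> 4)) & UINT32_C(0x0f0f0f0f);                          /* 8-bit partial sums */
    return (uSrc * UINT32_C(0x01010101)) >> 24;                                  /* sum the four bytes */
}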
10665
10666/**
10667 * @opcode 0xb9
10668 * @opinvalid intel-modrm
10669 * @optest ->
10670 */
10671FNIEMOP_DEF(iemOp_Grp10)
10672{
10673 /*
10674 * AMD does not decode beyond the 0xb9 opcode byte, whereas intel decodes the
10675 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10676 */
10677 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10678 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10679 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10680}
10681
10682
10683/**
10684 * Body for group 8 bit instruction.
10685 */
10686#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10687 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10688 \
10689 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10690 { \
10691 /* register destination. */ \
10692 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10693 \
10694 switch (pVCpu->iem.s.enmEffOpSize) \
10695 { \
10696 case IEMMODE_16BIT: \
10697 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10699 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10700 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10701 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10702 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10703 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10704 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10705 \
10706 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10707 IEM_MC_END(); \
10708 break; \
10709 \
10710 case IEMMODE_32BIT: \
10711 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10713 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10714 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10715 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10716 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10717 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10718 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10719 \
10720 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10721 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10722 IEM_MC_END(); \
10723 break; \
10724 \
10725 case IEMMODE_64BIT: \
10726 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10728 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10729 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10730 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10731 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10732 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10733 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10734 \
10735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10736 IEM_MC_END(); \
10737 break; \
10738 \
10739 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10740 } \
10741 } \
10742 else \
10743 { \
10744 /* memory destination. */ \
10745 /** @todo test negative bit offsets! */ \
10746 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10747 { \
10748 switch (pVCpu->iem.s.enmEffOpSize) \
10749 { \
10750 case IEMMODE_16BIT: \
10751 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10754 \
10755 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10756 IEMOP_HLP_DONE_DECODING(); \
10757 \
10758 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10759 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10760 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10761 \
10762 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10763 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10764 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10765 \
10766 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10767 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10768 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10769 IEM_MC_END(); \
10770 break; \
10771 \
10772 case IEMMODE_32BIT: \
10773 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10776 \
10777 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10778 IEMOP_HLP_DONE_DECODING(); \
10779 \
10780 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10781 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10782 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10783 \
10784 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10785 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10786 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10787 \
10788 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10789 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10790 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10791 IEM_MC_END(); \
10792 break; \
10793 \
10794 case IEMMODE_64BIT: \
10795 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10798 \
10799 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10800 IEMOP_HLP_DONE_DECODING(); \
10801 \
10802 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10803 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10804 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10805 \
10806 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10807 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10808 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10809 \
10810 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10811 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10812 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10813 IEM_MC_END(); \
10814 break; \
10815 \
10816 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10817 } \
10818 } \
10819 else \
10820 { \
10821 (void)0
10822/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10823#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10824 switch (pVCpu->iem.s.enmEffOpSize) \
10825 { \
10826 case IEMMODE_16BIT: \
10827 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10830 \
10831 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10832 IEMOP_HLP_DONE_DECODING(); \
10833 \
10834 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
10835 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10836 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10837 \
10838 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10839 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10840 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU16, fEFlagsIn, pu16Dst, u16Src); \
10841 \
10842 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10843 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10844 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10845 IEM_MC_END(); \
10846 break; \
10847 \
10848 case IEMMODE_32BIT: \
10849 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10852 \
10853 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10854 IEMOP_HLP_DONE_DECODING(); \
10855 \
10856 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10857 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
10858 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10859 \
10860 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10861 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10862 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU32, fEFlagsIn, pu32Dst, u32Src); \
10863 \
10864 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10865 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10866 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10867 IEM_MC_END(); \
10868 break; \
10869 \
10870 case IEMMODE_64BIT: \
10871 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10874 \
10875 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10876 IEMOP_HLP_DONE_DECODING(); \
10877 \
10878 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10879 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
10880 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10881 \
10882 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10883 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10884 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnLockedU64, fEFlagsIn, pu64Dst, u64Src); \
10885 \
10886 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10887 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10889 IEM_MC_END(); \
10890 break; \
10891 \
10892 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10893 } \
10894 } \
10895 } \
10896 (void)0
10897
10898/* Read-only version (bt) */
10899#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10901 \
10902 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10903 { \
10904 /* register destination. */ \
10905 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10906 \
10907 switch (pVCpu->iem.s.enmEffOpSize) \
10908 { \
10909 case IEMMODE_16BIT: \
10910 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10912 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10913 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10914 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10915 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10916 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10917 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10918 \
10919 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10920 IEM_MC_END(); \
10921 break; \
10922 \
10923 case IEMMODE_32BIT: \
10924 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10926 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10927 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10928 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10929 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10930 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10931 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10932 \
10933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10934 IEM_MC_END(); \
10935 break; \
10936 \
10937 case IEMMODE_64BIT: \
10938 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
10939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10940 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
10941 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10942 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10943 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
10944 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
10945 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10946 \
10947 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10948 IEM_MC_END(); \
10949 break; \
10950 \
10951 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10952 } \
10953 } \
10954 else \
10955 { \
10956 /* memory destination. */ \
10957 /** @todo test negative bit offsets! */ \
10958 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10959 { \
10960 switch (pVCpu->iem.s.enmEffOpSize) \
10961 { \
10962 case IEMMODE_16BIT: \
10963 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10966 \
10967 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10968 IEMOP_HLP_DONE_DECODING(); \
10969 \
10970 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10971 IEM_MC_ARG(uint16_t const *, pu16Dst, 1); \
10972 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10973 \
10974 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10975 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 2); \
10976 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
10977 \
10978 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
10979 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
10980 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10981 IEM_MC_END(); \
10982 break; \
10983 \
10984 case IEMMODE_32BIT: \
10985 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
10986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10988 \
10989 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10990 IEMOP_HLP_DONE_DECODING(); \
10991 \
10992 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10993 IEM_MC_ARG(uint32_t const *, pu32Dst, 1); \
10994 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10995 \
10996 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
10997 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 2); \
10998 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
10999 \
11000 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11001 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11002 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11003 IEM_MC_END(); \
11004 break; \
11005 \
11006 case IEMMODE_64BIT: \
11007 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11010 \
11011 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11012 IEMOP_HLP_DONE_DECODING(); \
11013 \
11014 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11015 IEM_MC_ARG(uint64_t const *, pu64Dst, 1); \
11016 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11017 \
11018 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11019 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 2); \
11020 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, a_fnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11021 \
11022 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11023 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11024 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11025 IEM_MC_END(); \
11026 break; \
11027 \
11028 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11029 } \
11030 } \
11031 else \
11032 { \
11033 IEMOP_HLP_DONE_DECODING(); \
11034 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11035 } \
11036 } \
11037 (void)0
11038
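/*
 * Illustrative sketch only, not used by the decoder: the group 8 immediate
 * forms above mask the bit offset to the operand width (bImm & 0x0f/0x1f/
 * 0x3f), so unlike the Ev,Gv forms they can never address outside the unit
 * at the effective address.  This models the four operations on a 32-bit
 * destination; CF (the old bit value) and the other flags are omitted and
 * the helper name is made up.
 */
DECLINLINE(uint32_t) iemSketchGrp8U32(uint32_t uDst, uint8_t bImm, unsigned iReg /* modrm.reg: 4=bt 5=bts 6=btr 7=btc */)
{
    uint32_t const fMask = RT_BIT_32(bImm & 0x1f);
    switch (iReg)
    {
        case 5:  return uDst | fMask;   /* bts: set */
        case 6:  return uDst & ~fMask;  /* btr: reset */
        case 7:  return uDst ^ fMask;   /* btc: complement */
        default: return uDst;           /* bt: only CF changes */
    }
}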
11039
11040/**
11041 * @opmaps grp8
11042 * @opcode /4
11043 * @oppfx n/a
11044 * @opflclass bitmap
11045 */
11046FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11047{
11048 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11049 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11050}
11051
11052
11053/**
11054 * @opmaps grp8
11055 * @opcode /5
11056 * @oppfx n/a
11057 * @opflclass bitmap
11058 */
11059FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11060{
11061 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11062 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11063 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11064}
11065
11066
11067/**
11068 * @opmaps grp8
11069 * @opcode /6
11070 * @oppfx n/a
11071 * @opflclass bitmap
11072 */
11073FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11074{
11075 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11076 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11077 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11078}
11079
11080
11081/**
11082 * @opmaps grp8
11083 * @opcode /7
11084 * @oppfx n/a
11085 * @opflclass bitmap
11086 */
11087FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11088{
11089 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11090 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11091 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11092}
11093
11094
11095/** Opcode 0x0f 0xba. */
11096FNIEMOP_DEF(iemOp_Grp8)
11097{
11098 IEMOP_HLP_MIN_386();
11099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11100 switch (IEM_GET_MODRM_REG_8(bRm))
11101 {
11102 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11103 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11104 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11105 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11106
11107 case 0: case 1: case 2: case 3:
11108 /* Both AMD and Intel want full modr/m decoding and imm8. */
11109 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11110
11111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11112 }
11113}
11114
11115
11116/**
11117 * @opcode 0xbb
11118 * @oppfx n/a
11119 * @opflclass bitmap
11120 */
11121FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11122{
11123 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11124 IEMOP_HLP_MIN_386();
11125 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11126 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11127}
11128
11129
11130/**
11131 * Body for BSF and BSR instructions.
11132 *
11133 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't write the
11134 * destination register when the source is zero (ZF=1), which means that for
11135 * 32-bit operations the high 32 bits of the destination must be left alone.
11136 *
11137 * @param pImpl Pointer to the instruction implementation (assembly).
11138 */
11139#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11141 \
11142 /* \
11143 * If rm is denoting a register, no more instruction bytes. \
11144 */ \
11145 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11146 { \
11147 switch (pVCpu->iem.s.enmEffOpSize) \
11148 { \
11149 case IEMMODE_16BIT: \
11150 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11152 \
11153 IEM_MC_ARG(uint16_t, u16Src, 2); \
11154 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11155 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11156 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11157 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11158 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11159 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11160 \
11161 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11162 IEM_MC_END(); \
11163 break; \
11164 \
11165 case IEMMODE_32BIT: \
11166 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11168 \
11169 IEM_MC_ARG(uint32_t, u32Src, 2); \
11170 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11171 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11172 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11173 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11174 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11175 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11176 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11177 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11178 } IEM_MC_ENDIF(); \
11179 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11180 IEM_MC_END(); \
11181 break; \
11182 \
11183 case IEMMODE_64BIT: \
11184 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11186 \
11187 IEM_MC_ARG(uint64_t, u64Src, 2); \
11188 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11189 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11190 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11191 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11192 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11193 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11194 \
11195 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11196 IEM_MC_END(); \
11197 break; \
11198 \
11199 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11200 } \
11201 } \
11202 else \
11203 { \
11204 /* \
11205 * We're accessing memory. \
11206 */ \
11207 switch (pVCpu->iem.s.enmEffOpSize) \
11208 { \
11209 case IEMMODE_16BIT: \
11210 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11214 \
11215 IEM_MC_ARG(uint16_t, u16Src, 2); \
11216 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11217 IEM_MC_ARG(uint16_t *, pu16Dst, 1); \
11218 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11219 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11220 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU16, fEFlagsIn, pu16Dst, u16Src); \
11221 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11222 \
11223 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11224 IEM_MC_END(); \
11225 break; \
11226 \
11227 case IEMMODE_32BIT: \
11228 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0); \
11229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11232 \
11233 IEM_MC_ARG(uint32_t, u32Src, 2); \
11234 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11235 IEM_MC_ARG(uint32_t *, pu32Dst, 1); \
11236 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11237 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11238 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU32, fEFlagsIn, pu32Dst, u32Src); \
11239 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11240 \
11241 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11242 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11243 } IEM_MC_ENDIF(); \
11244 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11245 IEM_MC_END(); \
11246 break; \
11247 \
11248 case IEMMODE_64BIT: \
11249 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11253 \
11254 IEM_MC_ARG(uint64_t, u64Src, 2); \
11255 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11256 IEM_MC_ARG(uint64_t *, pu64Dst, 1); \
11257 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11258 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
11259 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, pImpl->pfnNormalU64, fEFlagsIn, pu64Dst, u64Src); \
11260 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
11261 \
11262 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11263 IEM_MC_END(); \
11264 break; \
11265 \
11266 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11267 } \
11268 } (void)0
11269
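/*
 * Illustrative sketch only, not used by the decoder: the scan semantics the
 * body above wires up.  A zero source sets ZF and leaves the destination
 * untouched, which is exactly why the 32-bit cases clear the high destination
 * bits only inside IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF).  BSR is the same
 * idea scanning down from the most significant bit.  Helper name made up;
 * flags other than ZF omitted.
 */
DECLINLINE(bool) iemSketchBsfU32(uint32_t uSrc, uint32_t *puDst)
{
    if (!uSrc)
        return false;               /* ZF=1, *puDst is not written */
    uint32_t iBit = 0;
    while (!(uSrc & 1))             /* scan up from bit 0 */
    {
        uSrc >>= 1;
        iBit++;
    }
    *puDst = iBit;                  /* ZF=0 */
    return true;
}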
11270
11271/**
11272 * @opcode 0xbc
11273 * @oppfx !0xf3
11274 * @opfltest cf,pf,af,sf,of
11275 * @opflmodify cf,pf,af,zf,sf,of
11276 * @opflundef cf,pf,af,sf,of
11277 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11278 * document them as inputs. Sigh.
11279 */
11280FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11281{
11282 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11283 IEMOP_HLP_MIN_386();
11284 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11285 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11286 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11287}
11288
11289
11290/**
11291 * @opcode 0xbc
11292 * @oppfx 0xf3
11293 * @opfltest pf,af,sf,of
11294 * @opflmodify cf,pf,af,zf,sf,of
11295 * @opflundef pf,af,sf,of
11296 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11297 * document them as inputs. Sigh.
11298 */
11299FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11300{
11301 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11302 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11303 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11304
11305#ifndef TST_IEM_CHECK_MC
11306 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11307 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11308 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11309 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11310 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11311 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11312 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11313 {
11314 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11315 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11316 };
11317#endif
11318 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11319 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11320 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11322 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11323}
11324
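/*
 * Illustrative sketch only, not used by the decoder: how TZCNT differs from
 * the BSF it degrades to on non-BMI1 guests.  TZCNT is defined for a zero
 * source (result = operand width, CF=1) and always writes the destination;
 * ZF reflects a zero result rather than a zero source.  Helper name made up;
 * only the result and CF/ZF are modelled.
 */
DECLINLINE(uint32_t) iemSketchTzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    if (!uSrc)
    {
        *pfEFlags |= X86_EFL_CF;    /* source was zero */
        return 32;
    }
    uint32_t cZeros = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        cZeros++;
    }
    if (!cZeros)
        *pfEFlags |= X86_EFL_ZF;    /* result is zero (bit 0 was set) */
    return cZeros;
}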
11325
11326/**
11327 * @opcode 0xbd
11328 * @oppfx !0xf3
11329 * @opfltest cf,pf,af,sf,of
11330 * @opflmodify cf,pf,af,zf,sf,of
11331 * @opflundef cf,pf,af,sf,of
11332 * @todo AMD doesn't modify cf,pf,af,sf&of but since intel does, we're forced to
11333 * document them as inputs. Sigh.
11334 */
11335FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11336{
11337 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11338 IEMOP_HLP_MIN_386();
11339 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11340 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11341 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11342}
11343
11344
11345/**
11346 * @opcode 0xbd
11347 * @oppfx 0xf3
11348 * @opfltest pf,af,sf,of
11349 * @opflmodify cf,pf,af,zf,sf,of
11350 * @opflundef pf,af,sf,of
11351 * @todo AMD doesn't modify pf,af,sf&of but since intel does, we're forced to
11352 * document them as inputs. Sigh.
11353 */
11354FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11355{
11356 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11357 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11358 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11359
11360#ifndef TST_IEM_CHECK_MC
11361 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11362 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11363 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11364 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11365 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11366 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11367 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11368 {
11369 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11370 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11371 };
11372#endif
11373 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11374 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11375 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fAbm);
11376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11377 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11378}
11379
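/*
 * Illustrative sketch only, not used by the decoder: LZCNT mirrors TZCNT
 * from the other end, counting down from the most significant bit, and like
 * TZCNT it yields the operand width with CF=1 for a zero source instead of
 * the undefined BSR result.  Helper name made up.
 */
DECLINLINE(uint32_t) iemSketchLzCntU32(uint32_t uSrc)
{
    uint32_t cZeros = 0;
    while (cZeros < 32 && !(uSrc & RT_BIT_32(31 - cZeros))) /* scan down from bit 31 */
        cZeros++;
    return cZeros;                  /* 32 when uSrc == 0 */
}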
11380
11381
11382/** Opcode 0x0f 0xbe. */
11383FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11384{
11385 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11386 IEMOP_HLP_MIN_386();
11387
11388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11389
11390 /*
11391 * If rm is denoting a register, no more instruction bytes.
11392 */
11393 if (IEM_IS_MODRM_REG_MODE(bRm))
11394 {
11395 switch (pVCpu->iem.s.enmEffOpSize)
11396 {
11397 case IEMMODE_16BIT:
11398 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11400 IEM_MC_LOCAL(uint16_t, u16Value);
11401 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11402 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11403 IEM_MC_ADVANCE_RIP_AND_FINISH();
11404 IEM_MC_END();
11405 break;
11406
11407 case IEMMODE_32BIT:
11408 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11410 IEM_MC_LOCAL(uint32_t, u32Value);
11411 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11412 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11413 IEM_MC_ADVANCE_RIP_AND_FINISH();
11414 IEM_MC_END();
11415 break;
11416
11417 case IEMMODE_64BIT:
11418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11420 IEM_MC_LOCAL(uint64_t, u64Value);
11421 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11422 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11423 IEM_MC_ADVANCE_RIP_AND_FINISH();
11424 IEM_MC_END();
11425 break;
11426
11427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11428 }
11429 }
11430 else
11431 {
11432 /*
11433 * We're loading a register from memory.
11434 */
11435 switch (pVCpu->iem.s.enmEffOpSize)
11436 {
11437 case IEMMODE_16BIT:
11438 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11439 IEM_MC_LOCAL(uint16_t, u16Value);
11440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11443 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11444 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11445 IEM_MC_ADVANCE_RIP_AND_FINISH();
11446 IEM_MC_END();
11447 break;
11448
11449 case IEMMODE_32BIT:
11450 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11451 IEM_MC_LOCAL(uint32_t, u32Value);
11452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11455 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11456 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11457 IEM_MC_ADVANCE_RIP_AND_FINISH();
11458 IEM_MC_END();
11459 break;
11460
11461 case IEMMODE_64BIT:
11462 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11463 IEM_MC_LOCAL(uint64_t, u64Value);
11464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11467 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11468 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11469 IEM_MC_ADVANCE_RIP_AND_FINISH();
11470 IEM_MC_END();
11471 break;
11472
11473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11474 }
11475 }
11476}
11477
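/*
 * Illustrative sketch only, not used by the decoder: movsx is the sign
 * extending twin of movzx; for the 32-bit register form the extension stops
 * at bit 31 and the ordinary 32-bit register write then zeroes bits 63:32
 * in 64-bit mode, just as for movzx.  Helper name made up.
 */
DECLINLINE(uint32_t) iemSketchMovsxU8ToU32(uint8_t uSrc)
{
    return (uint32_t)(int32_t)(int8_t)uSrc;    /* replicate bit 7 into bits 31:8 */
}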
11478
11479/** Opcode 0x0f 0xbf. */
11480FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11481{
11482 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11483 IEMOP_HLP_MIN_386();
11484
11485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11486
11487 /** @todo Not entirely sure how the operand size prefix is handled here,
11488 * assuming that it will be ignored. Would be nice to have a few
11489 * tests for this. */
11490 /*
11491 * If rm is denoting a register, no more instruction bytes.
11492 */
11493 if (IEM_IS_MODRM_REG_MODE(bRm))
11494 {
11495 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11496 {
11497 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11499 IEM_MC_LOCAL(uint32_t, u32Value);
11500 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11501 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11502 IEM_MC_ADVANCE_RIP_AND_FINISH();
11503 IEM_MC_END();
11504 }
11505 else
11506 {
11507 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11509 IEM_MC_LOCAL(uint64_t, u64Value);
11510 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11511 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11512 IEM_MC_ADVANCE_RIP_AND_FINISH();
11513 IEM_MC_END();
11514 }
11515 }
11516 else
11517 {
11518 /*
11519 * We're loading a register from memory.
11520 */
11521 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11522 {
11523 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
11524 IEM_MC_LOCAL(uint32_t, u32Value);
11525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11528 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11529 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11530 IEM_MC_ADVANCE_RIP_AND_FINISH();
11531 IEM_MC_END();
11532 }
11533 else
11534 {
11535 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11536 IEM_MC_LOCAL(uint64_t, u64Value);
11537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11540 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11541 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11542 IEM_MC_ADVANCE_RIP_AND_FINISH();
11543 IEM_MC_END();
11544 }
11545 }
11546}
11547
11548
11549/**
11550 * @opcode 0xc0
11551 * @opflclass arithmetic
11552 */
11553FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11554{
11555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11556 IEMOP_HLP_MIN_486();
11557 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11558
11559 /*
11560 * If rm is denoting a register, no more instruction bytes.
11561 */
11562 if (IEM_IS_MODRM_REG_MODE(bRm))
11563 {
11564 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11566 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11567 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11568 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11569
11570 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11571 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11572 IEM_MC_REF_EFLAGS(pEFlags);
11573 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11574
11575 IEM_MC_ADVANCE_RIP_AND_FINISH();
11576 IEM_MC_END();
11577 }
11578 else
11579 {
11580 /*
11581 * We're accessing memory.
11582 */
11583#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11584 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11587 IEMOP_HLP_DONE_DECODING(); \
11588 \
11589 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11590 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11591 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11592 \
11593 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11594 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11595 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11596 \
11597 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11598 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11599 \
11600 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11601 IEM_MC_COMMIT_EFLAGS(EFlags); \
11602 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11603 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11604 IEM_MC_END()
11605 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11606 {
11607 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11608 }
11609 else
11610 {
11611 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11612 }
11613 }
11614}
11615
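/*
 * Illustrative sketch only, not used by the decoder: XADD exchanges the two
 * operands and leaves their sum in the destination, which is why the memory
 * paths snapshot the register operand (u8RegCopy above, u16RegCopy & friends
 * below) before calling the worker and store it back afterwards.  Flags from
 * the addition are omitted and the helper name is made up.
 */
DECLINLINE(void) iemSketchXAddU32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uSum = *puDst + *puReg;
    *puReg = *puDst;                /* register <- old destination */
    *puDst = uSum;                  /* destination <- sum */
}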
11616
11617/**
11618 * @opcode 0xc1
11619 * @opflclass arithmetic
11620 */
11621FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11622{
11623 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11624 IEMOP_HLP_MIN_486();
11625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11626
11627 /*
11628 * If rm is denoting a register, no more instruction bytes.
11629 */
11630 if (IEM_IS_MODRM_REG_MODE(bRm))
11631 {
11632 switch (pVCpu->iem.s.enmEffOpSize)
11633 {
11634 case IEMMODE_16BIT:
11635 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11637 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11638 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11640
11641 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11642 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11643 IEM_MC_REF_EFLAGS(pEFlags);
11644 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11645
11646 IEM_MC_ADVANCE_RIP_AND_FINISH();
11647 IEM_MC_END();
11648 break;
11649
11650 case IEMMODE_32BIT:
11651 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
11652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11653 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11654 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11655 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11656
11657 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11658 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11659 IEM_MC_REF_EFLAGS(pEFlags);
11660 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11661
11662 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11663 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11664 IEM_MC_ADVANCE_RIP_AND_FINISH();
11665 IEM_MC_END();
11666 break;
11667
11668 case IEMMODE_64BIT:
11669 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
11670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11671 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11672 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11673 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11674
11675 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11676 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11677 IEM_MC_REF_EFLAGS(pEFlags);
11678 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11679
11680 IEM_MC_ADVANCE_RIP_AND_FINISH();
11681 IEM_MC_END();
11682 break;
11683
11684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11685 }
11686 }
11687 else
11688 {
11689 /*
11690 * We're accessing memory.
11691 */
11692#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11693 do { \
11694 switch (pVCpu->iem.s.enmEffOpSize) \
11695 { \
11696 case IEMMODE_16BIT: \
11697 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11700 IEMOP_HLP_DONE_DECODING(); \
11701 \
11702 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11703 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11704 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11705 \
11706 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11707 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11708 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11709 \
11710 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11711 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11712 \
11713 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11714 IEM_MC_COMMIT_EFLAGS(EFlags); \
11715 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11716 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11717 IEM_MC_END(); \
11718 break; \
11719 \
11720 case IEMMODE_32BIT: \
11721 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0); \
11722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11724 IEMOP_HLP_DONE_DECODING(); \
11725 \
11726 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11727 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11728 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11729 \
11730 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11731 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11732 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11733 \
11734 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11735 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11736 \
11737 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11738 IEM_MC_COMMIT_EFLAGS(EFlags); \
11739 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11740 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11741 IEM_MC_END(); \
11742 break; \
11743 \
11744 case IEMMODE_64BIT: \
11745 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
11746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11748 IEMOP_HLP_DONE_DECODING(); \
11749 \
11750 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11751 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11752 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11753 \
11754 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11755 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11756 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11757 \
11758 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11759 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11760 \
11761 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11762 IEM_MC_COMMIT_EFLAGS(EFlags); \
11763 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11764 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11765 IEM_MC_END(); \
11766 break; \
11767 \
11768 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11769 } \
11770 } while (0)
11771
11772 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11773 {
11774 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11775 }
11776 else
11777 {
11778 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11779 }
11780 }
11781}
11782
11783
11784/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11785FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11786{
11787 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11788
11789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11790 if (IEM_IS_MODRM_REG_MODE(bRm))
11791 {
11792 /*
11793 * XMM, XMM.
11794 */
11795 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11796 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11798 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11799 IEM_MC_LOCAL(X86XMMREG, Dst);
11800 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11801 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11802 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11803 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11804 IEM_MC_PREPARE_SSE_USAGE();
11805 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11806 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11807 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11808 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11809
11810 IEM_MC_ADVANCE_RIP_AND_FINISH();
11811 IEM_MC_END();
11812 }
11813 else
11814 {
11815 /*
11816 * XMM, [mem128].
11817 */
11818 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11819 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11820 IEM_MC_LOCAL(X86XMMREG, Dst);
11821 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11822 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11824
11825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11826 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11827 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11829 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11830 IEM_MC_PREPARE_SSE_USAGE();
11831
11832 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11833 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpps_u128, pDst, pSrc, bImmArg);
11834 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11835 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11836
11837 IEM_MC_ADVANCE_RIP_AND_FINISH();
11838 IEM_MC_END();
11839 }
11840}
11841
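/*
 * Illustrative sketch only, not used by the decoder: the imm8 of the
 * cmpps/cmppd/cmpss/cmpsd family selects one of eight compare predicates and
 * every lane yields an all-ones or all-zeroes mask.  This models a single
 * single-precision lane with ordinary C comparisons, glossing over signalling
 * NaN details; the helper name is made up.
 */
DECLINLINE(uint32_t) iemSketchCmpPsLane(float r32Src1, float r32Src2, uint8_t bImm)
{
    bool fResult;
    switch (bImm & 7)
    {
        case 0:  fResult =   r32Src1 == r32Src2;  break;    /* EQ    */
        case 1:  fResult =   r32Src1 <  r32Src2;  break;    /* LT    */
        case 2:  fResult =   r32Src1 <= r32Src2;  break;    /* LE    */
        case 3:  fResult =   r32Src1 != r32Src1
                          || r32Src2 != r32Src2;  break;    /* UNORD */
        case 4:  fResult =   r32Src1 != r32Src2;  break;    /* NEQ   */
        case 5:  fResult = !(r32Src1 <  r32Src2); break;    /* NLT   */
        case 6:  fResult = !(r32Src1 <= r32Src2); break;    /* NLE   */
        default: fResult =   r32Src1 == r32Src1
                          && r32Src2 == r32Src2;  break;    /* ORD   */
    }
    return fResult ? UINT32_MAX : 0;
}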
11842
11843/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11844FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11845{
11846 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11847
11848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11849 if (IEM_IS_MODRM_REG_MODE(bRm))
11850 {
11851 /*
11852 * XMM, XMM.
11853 */
11854 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11855 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11857 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11858 IEM_MC_LOCAL(X86XMMREG, Dst);
11859 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11860 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11861 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11862 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11863 IEM_MC_PREPARE_SSE_USAGE();
11864 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11865 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11866 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11867 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11868
11869 IEM_MC_ADVANCE_RIP_AND_FINISH();
11870 IEM_MC_END();
11871 }
11872 else
11873 {
11874 /*
11875 * XMM, [mem128].
11876 */
11877 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11878 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11879 IEM_MC_LOCAL(X86XMMREG, Dst);
11880 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11881 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11883
11884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11885 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11886 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11888 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11889 IEM_MC_PREPARE_SSE_USAGE();
11890
11891 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11892 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmppd_u128, pDst, pSrc, bImmArg);
11893 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11894 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11895
11896 IEM_MC_ADVANCE_RIP_AND_FINISH();
11897 IEM_MC_END();
11898 }
11899}
11900
11901
11902/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11903FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11904{
11905 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11906
11907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11908 if (IEM_IS_MODRM_REG_MODE(bRm))
11909 {
11910 /*
11911 * XMM32, XMM32.
11912 */
11913 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11914 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11916 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11917 IEM_MC_LOCAL(X86XMMREG, Dst);
11918 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11919 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11920 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11921 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11922 IEM_MC_PREPARE_SSE_USAGE();
11923 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11925 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11926 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11927
11928 IEM_MC_ADVANCE_RIP_AND_FINISH();
11929 IEM_MC_END();
11930 }
11931 else
11932 {
11933 /*
11934 * XMM32, [mem32].
11935 */
11936 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11937 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11938 IEM_MC_LOCAL(X86XMMREG, Dst);
11939 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11940 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11942
11943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11944 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11945 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11947 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11948 IEM_MC_PREPARE_SSE_USAGE();
11949
11950 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
11951 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11952 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpss_u128, pDst, pSrc, bImmArg);
11953 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11954 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11955
11956 IEM_MC_ADVANCE_RIP_AND_FINISH();
11957 IEM_MC_END();
11958 }
11959}
11960
11961
11962/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11963FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11964{
11965 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11966
11967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11968 if (IEM_IS_MODRM_REG_MODE(bRm))
11969 {
11970 /*
11971 * XMM64, XMM64.
11972 */
11973 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11974 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11976 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11977 IEM_MC_LOCAL(X86XMMREG, Dst);
11978 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
11979 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
11980 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
11981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11982 IEM_MC_PREPARE_SSE_USAGE();
11983 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11984 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
11985 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11986 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11987
11988 IEM_MC_ADVANCE_RIP_AND_FINISH();
11989 IEM_MC_END();
11990 }
11991 else
11992 {
11993 /*
11994 * XMM64, [mem64].
11995 */
11996 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
11997 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11998 IEM_MC_LOCAL(X86XMMREG, Dst);
11999 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 0);
12000 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
12001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12002
12003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12004 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12005 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12007 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12008 IEM_MC_PREPARE_SSE_USAGE();
12009
12010 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12011 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12012 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cmpsd_u128, pDst, pSrc, bImmArg);
12013 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12014 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12015
12016 IEM_MC_ADVANCE_RIP_AND_FINISH();
12017 IEM_MC_END();
12018 }
12019}
12020
12021
12022/** Opcode 0x0f 0xc3. */
12023FNIEMOP_DEF(iemOp_movnti_My_Gy)
12024{
12025 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12026
12027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12028
12029 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12030 if (IEM_IS_MODRM_MEM_MODE(bRm))
12031 {
12032 switch (pVCpu->iem.s.enmEffOpSize)
12033 {
12034 case IEMMODE_32BIT:
12035 IEM_MC_BEGIN(IEM_MC_F_MIN_386, 0);
12036 IEM_MC_LOCAL(uint32_t, u32Value);
12037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12038
12039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12041
12042 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12043 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12044 IEM_MC_ADVANCE_RIP_AND_FINISH();
12045 IEM_MC_END();
12046 break;
12047
12048 case IEMMODE_64BIT:
12049 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12050 IEM_MC_LOCAL(uint64_t, u64Value);
12051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12052
12053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12055
12056 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12057 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12058 IEM_MC_ADVANCE_RIP_AND_FINISH();
12059 IEM_MC_END();
12060 break;
12061
12062 case IEMMODE_16BIT:
12063 /** @todo check this form. */
12064 IEMOP_RAISE_INVALID_OPCODE_RET();
12065
12066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12067 }
12068 }
12069 else
12070 IEMOP_RAISE_INVALID_OPCODE_RET();
12071}
12072
12073
12074/* Opcode 0x66 0x0f 0xc3 - invalid */
12075/* Opcode 0xf3 0x0f 0xc3 - invalid */
12076/* Opcode 0xf2 0x0f 0xc3 - invalid */
12077
12078
12079/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12080FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12081{
12082 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12084 if (IEM_IS_MODRM_REG_MODE(bRm))
12085 {
12086 /*
12087 * Register, register.
12088 */
12089 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12090 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12091 IEM_MC_LOCAL(uint16_t, uValue);
12092
12093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12094 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12095 IEM_MC_PREPARE_FPU_USAGE();
12096 IEM_MC_FPU_TO_MMX_MODE();
12097
12098 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12099 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12100
12101 IEM_MC_ADVANCE_RIP_AND_FINISH();
12102 IEM_MC_END();
12103 }
12104 else
12105 {
12106 /*
12107 * Register, memory.
12108 */
12109 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12111 IEM_MC_LOCAL(uint16_t, uValue);
12112
12113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12114 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12117 IEM_MC_PREPARE_FPU_USAGE();
12118
12119 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12120 IEM_MC_FPU_TO_MMX_MODE();
12121 IEM_MC_STORE_MREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
12122
12123 IEM_MC_ADVANCE_RIP_AND_FINISH();
12124 IEM_MC_END();
12125 }
12126}
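
/* Note! Illustrative C for the pinsrw operation, not the actual code: the
   immediate picks which of the four words of the destination is replaced
   and the other words are left untouched:

        uDst.au16[bImm & 3] = (uint16_t)uSrc16;
*/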
12127
12128
12129/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12130FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12131{
12132 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12134 if (IEM_IS_MODRM_REG_MODE(bRm))
12135 {
12136 /*
12137 * Register, register.
12138 */
12139 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12140 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12142
12143 IEM_MC_LOCAL(uint16_t, uValue);
12144 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12145 IEM_MC_PREPARE_SSE_USAGE();
12146
12147 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
12148 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12149 IEM_MC_ADVANCE_RIP_AND_FINISH();
12150 IEM_MC_END();
12151 }
12152 else
12153 {
12154 /*
12155 * Register, memory.
12156 */
12157 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12159 IEM_MC_LOCAL(uint16_t, uValue);
12160
12161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12162 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12165 IEM_MC_PREPARE_SSE_USAGE();
12166
12167 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12168 IEM_MC_STORE_XREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
12169 IEM_MC_ADVANCE_RIP_AND_FINISH();
12170 IEM_MC_END();
12171 }
12172}
12173
12174
12175/* Opcode 0xf3 0x0f 0xc4 - invalid */
12176/* Opcode 0xf2 0x0f 0xc4 - invalid */
12177
12178
12179/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12180FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12181{
12182 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12184 if (IEM_IS_MODRM_REG_MODE(bRm))
12185 {
12186 /*
12187 * Greg32, MMX, imm8.
12188 */
12189 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12190 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12192 IEM_MC_LOCAL(uint16_t, uValue);
12193 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12194 IEM_MC_PREPARE_FPU_USAGE();
12195 IEM_MC_FPU_TO_MMX_MODE();
12196 IEM_MC_FETCH_MREG_U16(uValue, IEM_GET_MODRM_RM_8(bRm), bImm & 3);
12197 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12198 IEM_MC_ADVANCE_RIP_AND_FINISH();
12199 IEM_MC_END();
12200 }
12201 /* No memory operand. */
12202 else
12203 IEMOP_RAISE_INVALID_OPCODE_RET();
12204}
12205
12206
12207/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12208FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12209{
12210 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12212 if (IEM_IS_MODRM_REG_MODE(bRm))
12213 {
12214 /*
12215 * Greg32, XMM, imm8.
12216 */
12217 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12218 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12220 IEM_MC_LOCAL(uint16_t, uValue);
12221 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12222 IEM_MC_PREPARE_SSE_USAGE();
12223 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
12224 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
12225 IEM_MC_ADVANCE_RIP_AND_FINISH();
12226 IEM_MC_END();
12227 }
12228 /* No memory operand. */
12229 else
12230 IEMOP_RAISE_INVALID_OPCODE_RET();
12231}
12232
12233
12234/* Opcode 0xf3 0x0f 0xc5 - invalid */
12235/* Opcode 0xf2 0x0f 0xc5 - invalid */
12236
12237
12238/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12239FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12240{
12241 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12243 if (IEM_IS_MODRM_REG_MODE(bRm))
12244 {
12245 /*
12246 * XMM, XMM, imm8.
12247 */
12248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12249 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12251 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12252 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12253 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12254 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12255 IEM_MC_PREPARE_SSE_USAGE();
12256 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12257 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12258 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12259 IEM_MC_ADVANCE_RIP_AND_FINISH();
12260 IEM_MC_END();
12261 }
12262 else
12263 {
12264 /*
12265 * XMM, [mem128], imm8.
12266 */
12267 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12268 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12269 IEM_MC_LOCAL(RTUINT128U, uSrc);
12270 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12272
12273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12274 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12275 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12277 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12278 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12279
12280 IEM_MC_PREPARE_SSE_USAGE();
12281 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12283
12284 IEM_MC_ADVANCE_RIP_AND_FINISH();
12285 IEM_MC_END();
12286 }
12287}
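
/* Note! The shufps immediate holds four 2-bit selectors; the two low result
   dwords pick from the destination and the two high ones from the source.
   Rough C sketch, not the worker implementation:

        uRes.au32[0] = uDst.au32[ bImm       & 3];
        uRes.au32[1] = uDst.au32[(bImm >> 2) & 3];
        uRes.au32[2] = uSrc.au32[(bImm >> 4) & 3];
        uRes.au32[3] = uSrc.au32[(bImm >> 6) & 3];
*/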
12288
12289
12290/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12291FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12292{
12293 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12295 if (IEM_IS_MODRM_REG_MODE(bRm))
12296 {
12297 /*
12298 * XMM, XMM, imm8.
12299 */
12300 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12301 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12303 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12304 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12305 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12306 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12307 IEM_MC_PREPARE_SSE_USAGE();
12308 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12309 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12310 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12311 IEM_MC_ADVANCE_RIP_AND_FINISH();
12312 IEM_MC_END();
12313 }
12314 else
12315 {
12316 /*
12317 * XMM, [mem128], imm8.
12318 */
12319 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12320 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12321 IEM_MC_LOCAL(RTUINT128U, uSrc);
12322 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12324
12325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12326 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12327 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12329 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12330 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12331
12332 IEM_MC_PREPARE_SSE_USAGE();
12333 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12334 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12335
12336 IEM_MC_ADVANCE_RIP_AND_FINISH();
12337 IEM_MC_END();
12338 }
12339}
12340
12341
12342/* Opcode 0xf3 0x0f 0xc6 - invalid */
12343/* Opcode 0xf2 0x0f 0xc6 - invalid */
12344
12345
12346/**
12347 * @opmaps grp9
12348 * @opcode /1
12349 * @opcodesub !11 mr/reg rex.w=0
12350 * @oppfx n/a
12351 * @opflmodify zf
12352 */
12353FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12354{
12355 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12356#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12357 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
12358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12360 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12361 \
12362 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12363 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12364 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12365 \
12366 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12367 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12368 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12369 \
12370 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12371 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12372 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12373 \
12374 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12375 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12376 \
12377 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12378 IEM_MC_COMMIT_EFLAGS(EFlags); \
12379 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12380 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12381 } IEM_MC_ENDIF(); \
12382 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12383 \
12384 IEM_MC_END()
12385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12386 {
12387 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12388 }
12389 else
12390 {
12391 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12392 }
12393}
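
/* Note! For reference, the unlocked worker boils down to something like the
   following C; illustrative only, with plain names standing in for the
   mapped memory destination, the EDX:EAX and ECX:EBX pairs and EFLAGS:

        uint64_t const uOld = *pu64MemDst;
        if (uOld == u64EaxEdx.u)
        {
            *pu64MemDst = u64EbxEcx.u;
            fEfl |= X86_EFL_ZF;
        }
        else
        {
            u64EaxEdx.u = uOld;     // stored to EDX:EAX by the ZF=0 path above
            fEfl &= ~X86_EFL_ZF;
        }
*/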
12394
12395
12396/**
12397 * @opmaps grp9
12398 * @opcode /1
12399 * @opcodesub !11 mr/reg rex.w=1
12400 * @oppfx n/a
12401 * @opflmodify zf
12402 */
12403FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12404{
12405 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12406 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12407 {
12408 /*
12409 * This is hairy, very hairy macro fun. We're walking a fine line
12410 * here to make the code parsable by IEMAllInstPython.py and fit into
12411 * the patterns IEMAllThrdPython.py requires for the code morphing.
12412 */
12413#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12414 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
12415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12417 IEMOP_HLP_DONE_DECODING(); \
12418 \
12419 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12420 bUnmapInfoStmt; \
12421 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12422 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12423 \
12424 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12425 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12426 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12427 \
12428 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12429 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12430 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12431 \
12432 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3)
12433
12434#define BODY_CMPXCHG16B_TAIL(a_Type) \
12435 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12436 IEM_MC_COMMIT_EFLAGS(EFlags); \
12437 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12438 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12439 } IEM_MC_ENDIF(); \
12440 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12441 IEM_MC_END()
12442
12443#ifdef RT_ARCH_AMD64
12444 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12445 {
12446 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12447 {
12448 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12449 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12450 BODY_CMPXCHG16B_TAIL(RW);
12451 }
12452 else
12453 {
12454 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12455 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12456 BODY_CMPXCHG16B_TAIL(ATOMIC);
12457 }
12458 }
12459 else
12460 { /* (see comments in #else case below) */
12461 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12462 {
12463 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12464 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12465 BODY_CMPXCHG16B_TAIL(RW);
12466 }
12467 else
12468 {
12469 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12470 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12471 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12472 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12473 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12474 pEFlags, bUnmapInfo);
12475 IEM_MC_END();
12476 }
12477 }
12478
12479#elif defined(RT_ARCH_ARM64)
12480 /** @todo may require fallback for unaligned accesses... */
12481 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12482 {
12483 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12484 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12485 BODY_CMPXCHG16B_TAIL(RW);
12486 }
12487 else
12488 {
12489 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12490 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12491 BODY_CMPXCHG16B_TAIL(ATOMIC);
12492 }
12493
12494#else
12495 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12496 accesses and not at all atomic, which works fine in a UNI CPU guest
12497 configuration (ignoring DMA). If guest SMP is active we have no choice
12498 but to use a rendezvous callback here. Sigh. */
12499 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12500 {
12501 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12502 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12503 BODY_CMPXCHG16B_TAIL(RW);
12504 }
12505 else
12506 {
12507 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12508 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12509 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12511 iemCImpl_cmpxchg16b_fallback_rendezvous,
12512 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12513 IEM_MC_END();
12514 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12515 }
12516#endif
12517
12518#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12519 }
12520 Log(("cmpxchg16b -> #UD\n"));
12521 IEMOP_RAISE_INVALID_OPCODE_RET();
12522}
12523
12524FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12525{
12526 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12527 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12528 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12529}
12530
12531
12532/** Opcode 0x0f 0xc7 11/6. */
12533FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12534{
12535 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12536 IEMOP_RAISE_INVALID_OPCODE_RET();
12537
12538 if (IEM_IS_MODRM_REG_MODE(bRm))
12539 {
12540 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12542 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12543 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12544 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12545 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12546 iemCImpl_rdrand, iReg, enmEffOpSize);
12547 IEM_MC_END();
12548 }
12549 /* Register only. */
12550 else
12551 IEMOP_RAISE_INVALID_OPCODE_RET();
12552}
12553
12554/** Opcode 0x0f 0xc7 !11/6. */
12555#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12556FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12557{
12558 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12559 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12560 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12561 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12562 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12564 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12565 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12566 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12567 IEM_MC_END();
12568}
12569#else
12570FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12571#endif
12572
12573/** Opcode 0x66 0x0f 0xc7 !11/6. */
12574#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12575FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12576{
12577 IEMOP_MNEMONIC(vmclear, "vmclear");
12578 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12579 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12581 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12583 IEMOP_HLP_DONE_DECODING();
12584 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12585 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12586 IEM_MC_END();
12587}
12588#else
12589FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12590#endif
12591
12592/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12593#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12594FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12595{
12596 IEMOP_MNEMONIC(vmxon, "vmxon");
12597 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12598 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12599 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12601 IEMOP_HLP_DONE_DECODING();
12602 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12603 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12604 IEM_MC_END();
12605}
12606#else
12607FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12608#endif
12609
12610/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12611#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12612FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12613{
12614 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12615 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12616 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12617 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12618 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12620 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12621 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12622 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12623 IEM_MC_END();
12624}
12625#else
12626FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12627#endif
12628
12629/** Opcode 0x0f 0xc7 11/7. */
12630FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12631{
12632 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12633 IEMOP_RAISE_INVALID_OPCODE_RET();
12634
12635 if (IEM_IS_MODRM_REG_MODE(bRm))
12636 {
12637 /* register destination. */
12638 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12640 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12641 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12642 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12643 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12644 iemCImpl_rdseed, iReg, enmEffOpSize);
12645 IEM_MC_END();
12646 }
12647 /* Register only. */
12648 else
12649 IEMOP_RAISE_INVALID_OPCODE_RET();
12650}
12651
12652/**
12653 * Group 9 jump table for register variant.
12654 */
12655IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12656{ /* pfx: none, 066h, 0f3h, 0f2h */
12657 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12658 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12659 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12660 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12661 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12662 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12663 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12664 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12665};
12666AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12667
12668
12669/**
12670 * Group 9 jump table for memory variant.
12671 */
12672IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12673{ /* pfx: none, 066h, 0f3h, 0f2h */
12674 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12675 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12676 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12677 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12678 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12679 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12680 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12681 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12682};
12683AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12684
12685
12686/** Opcode 0x0f 0xc7. */
12687FNIEMOP_DEF(iemOp_Grp9)
12688{
12689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12690 if (IEM_IS_MODRM_REG_MODE(bRm))
12691 /* register, register */
12692 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12693 + pVCpu->iem.s.idxPrefix], bRm);
12694 /* memory, register */
12695 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12696 + pVCpu->iem.s.idxPrefix], bRm);
12697}
12698
12699
12700/**
12701 * Common 'bswap register' helper.
12702 */
12703FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12704{
12705 switch (pVCpu->iem.s.enmEffOpSize)
12706 {
12707 case IEMMODE_16BIT:
12708 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12710 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12711 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12712 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12713 IEM_MC_ADVANCE_RIP_AND_FINISH();
12714 IEM_MC_END();
12715 break;
12716
12717 case IEMMODE_32BIT:
12718 IEM_MC_BEGIN(IEM_MC_F_MIN_486, 0);
12719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12720 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12721 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12722 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12723 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12724 IEM_MC_ADVANCE_RIP_AND_FINISH();
12725 IEM_MC_END();
12726 break;
12727
12728 case IEMMODE_64BIT:
12729 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
12730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12731 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12732 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12733 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12734 IEM_MC_ADVANCE_RIP_AND_FINISH();
12735 IEM_MC_END();
12736 break;
12737
12738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12739 }
12740}
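
/* Note! Illustrative C for the 32-bit byte swap performed by the worker,
   not the actual implementation:

        uDst = ((uSrc & UINT32_C(0x000000ff)) << 24)
             | ((uSrc & UINT32_C(0x0000ff00)) <<  8)
             | ((uSrc & UINT32_C(0x00ff0000)) >>  8)
             | ((uSrc & UINT32_C(0xff000000)) >> 24);

   The 16-bit encoding is undefined by the architecture; the behaviour
   commonly observed on real CPUs amounts to clearing the low word. */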
12741
12742
12743/** Opcode 0x0f 0xc8. */
12744FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12745{
12746 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12747 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12748 prefix; it appears REX.B is actually the correct prefix. For a parallel
12749 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12750 IEMOP_HLP_MIN_486();
12751 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12752}
12753
12754
12755/** Opcode 0x0f 0xc9. */
12756FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12757{
12758 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12759 IEMOP_HLP_MIN_486();
12760 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12761}
12762
12763
12764/** Opcode 0x0f 0xca. */
12765FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12766{
12767 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12768 IEMOP_HLP_MIN_486();
12769 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12770}
12771
12772
12773/** Opcode 0x0f 0xcb. */
12774FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12775{
12776 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12777 IEMOP_HLP_MIN_486();
12778 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12779}
12780
12781
12782/** Opcode 0x0f 0xcc. */
12783FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12784{
12785 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12786 IEMOP_HLP_MIN_486();
12787 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12788}
12789
12790
12791/** Opcode 0x0f 0xcd. */
12792FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12793{
12794 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12795 IEMOP_HLP_MIN_486();
12796 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12797}
12798
12799
12800/** Opcode 0x0f 0xce. */
12801FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12802{
12803 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12804 IEMOP_HLP_MIN_486();
12805 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12806}
12807
12808
12809/** Opcode 0x0f 0xcf. */
12810FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12811{
12812 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12813 IEMOP_HLP_MIN_486();
12814 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12815}
12816
12817
12818/* Opcode 0x0f 0xd0 - invalid */
12819
12820
12821/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12822FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12823{
12824 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12825 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12826}
12827
12828
12829/* Opcode 0xf3 0x0f 0xd0 - invalid */
12830
12831
12832/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12833FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12834{
12835 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12836 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12837}
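
/* Note! addsubps subtracts in the even lanes and adds in the odd ones;
   roughly the following, illustrative only:

        uRes.ar32[0] = uDst.ar32[0] - uSrc.ar32[0];
        uRes.ar32[1] = uDst.ar32[1] + uSrc.ar32[1];
        uRes.ar32[2] = uDst.ar32[2] - uSrc.ar32[2];
        uRes.ar32[3] = uDst.ar32[3] + uSrc.ar32[3];
*/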
12838
12839
12840
12841/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12842FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12843{
12844 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12845 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12846}
12847
12848/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12849FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12850{
12851 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12852 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12853}
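
/* Note! For the psrlw/psrld/psrlq family the entire source operand is taken
   as the shift count and counts beyond the element width zero the
   destination. Per-word sketch for psrlw, illustrative only:

        uRes.au16[i] = cShift <= 15 ? uDst.au16[i] >> cShift : 0;
*/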
12854
12855/* Opcode 0xf3 0x0f 0xd1 - invalid */
12856/* Opcode 0xf2 0x0f 0xd1 - invalid */
12857
12858/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12859FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12860{
12861 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12862 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12863}
12864
12865
12866/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12867FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12868{
12869 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12870 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12871}
12872
12873
12874/* Opcode 0xf3 0x0f 0xd2 - invalid */
12875/* Opcode 0xf2 0x0f 0xd2 - invalid */
12876
12877/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12878FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12879{
12880 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12881 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12882}
12883
12884
12885/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12886FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12887{
12888 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12889 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12890}
12891
12892
12893/* Opcode 0xf3 0x0f 0xd3 - invalid */
12894/* Opcode 0xf2 0x0f 0xd3 - invalid */
12895
12896
12897/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12898FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12899{
12900 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12901 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12902}
12903
12904
12905/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12906FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12907{
12908 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12909 SSE2_OPT_BODY_FullFull_To_Full(paddq, iemAImpl_paddq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12910}
12911
12912
12913/* Opcode 0xf3 0x0f 0xd4 - invalid */
12914/* Opcode 0xf2 0x0f 0xd4 - invalid */
12915
12916/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12917FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12918{
12919 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12920 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmullw_u64);
12921}
12922
12923/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12924FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12925{
12926 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12927 SSE2_OPT_BODY_FullFull_To_Full(pmullw, iemAImpl_pmullw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
12928}
12929
12930
12931/* Opcode 0xf3 0x0f 0xd5 - invalid */
12932/* Opcode 0xf2 0x0f 0xd5 - invalid */
12933
12934/* Opcode 0x0f 0xd6 - invalid */
12935
12936/**
12937 * @opcode 0xd6
12938 * @oppfx 0x66
12939 * @opcpuid sse2
12940 * @opgroup og_sse2_pcksclr_datamove
12941 * @opxcpttype none
12942 * @optest op1=-1 op2=2 -> op1=2
12943 * @optest op1=0 op2=-42 -> op1=-42
12944 */
12945FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12946{
12947 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12949 if (IEM_IS_MODRM_REG_MODE(bRm))
12950 {
12951 /*
12952 * Register, register.
12953 */
12954 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12956 IEM_MC_LOCAL(uint64_t, uSrc);
12957
12958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12960
12961 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12962 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12963
12964 IEM_MC_ADVANCE_RIP_AND_FINISH();
12965 IEM_MC_END();
12966 }
12967 else
12968 {
12969 /*
12970 * Memory, register.
12971 */
12972 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
12973 IEM_MC_LOCAL(uint64_t, uSrc);
12974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12975
12976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12978 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12979 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12980
12981 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12982 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12983
12984 IEM_MC_ADVANCE_RIP_AND_FINISH();
12985 IEM_MC_END();
12986 }
12987}
12988
12989
12990/**
12991 * @opcode 0xd6
12992 * @opcodesub 11 mr/reg
12993 * @oppfx f3
12994 * @opcpuid sse2
12995 * @opgroup og_sse2_simdint_datamove
12996 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12997 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12998 */
12999FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13000{
13001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13002 if (IEM_IS_MODRM_REG_MODE(bRm))
13003 {
13004 /*
13005 * Register, register.
13006 */
13007 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13008 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13010 IEM_MC_LOCAL(uint64_t, uSrc);
13011
13012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13013 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13014 IEM_MC_FPU_TO_MMX_MODE();
13015
13016 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13017 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13018
13019 IEM_MC_ADVANCE_RIP_AND_FINISH();
13020 IEM_MC_END();
13021 }
13022
13023 /**
13024 * @opdone
13025 * @opmnemonic udf30fd6mem
13026 * @opcode 0xd6
13027 * @opcodesub !11 mr/reg
13028 * @oppfx f3
13029 * @opunused intel-modrm
13030 * @opcpuid sse
13031 * @optest ->
13032 */
13033 else
13034 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13035}
13036
13037
13038/**
13039 * @opcode 0xd6
13040 * @opcodesub 11 mr/reg
13041 * @oppfx f2
13042 * @opcpuid sse2
13043 * @opgroup og_sse2_simdint_datamove
13044 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13045 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13046 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13047 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13048 * @optest op1=-42 op2=0xfedcba9876543210
13049 * -> op1=0xfedcba9876543210 ftw=0xff
13050 */
13051FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13052{
13053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13054 if (IEM_IS_MODRM_REG_MODE(bRm))
13055 {
13056 /*
13057 * Register, register.
13058 */
13059 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13060 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13062 IEM_MC_LOCAL(uint64_t, uSrc);
13063
13064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13065 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13066 IEM_MC_FPU_TO_MMX_MODE();
13067
13068 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13069 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13070
13071 IEM_MC_ADVANCE_RIP_AND_FINISH();
13072 IEM_MC_END();
13073 }
13074
13075 /**
13076 * @opdone
13077 * @opmnemonic udf20fd6mem
13078 * @opcode 0xd6
13079 * @opcodesub !11 mr/reg
13080 * @oppfx f2
13081 * @opunused intel-modrm
13082 * @opcpuid sse
13083 * @optest ->
13084 */
13085 else
13086 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13087}
13088
13089
13090/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13091FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13092{
13093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13094 /* Docs say register only. */
13095 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13096 {
13097 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13098 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13099 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13101 IEM_MC_ARG(uint64_t *, puDst, 0);
13102 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13103 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13104 IEM_MC_PREPARE_FPU_USAGE();
13105 IEM_MC_FPU_TO_MMX_MODE();
13106
13107 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13108 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13109 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13110
13111 IEM_MC_ADVANCE_RIP_AND_FINISH();
13112 IEM_MC_END();
13113 }
13114 else
13115 IEMOP_RAISE_INVALID_OPCODE_RET();
13116}
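
/* Note! Illustrative C for what pmovmskb gathers from the 64-bit source:
   the most significant bit of each packed byte, placed in the matching low
   bit of the destination with the remaining bits cleared. Not the actual
   worker code:

        uint64_t fMask = 0;
        for (unsigned iByte = 0; iByte < 8; iByte++)
            fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
*/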
13117
13118
13119/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13120FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13121{
13122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13123 /* Docs say register only. */
13124 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13125 {
13126 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13127 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13131 IEM_MC_PREPARE_SSE_USAGE();
13132 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
13133 IEM_MC_NATIVE_EMIT_2(iemNativeEmit_pmovmskb_rr_u128, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
13134 } IEM_MC_NATIVE_ELSE() {
13135 IEM_MC_ARG(uint64_t *, puDst, 0);
13136 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13137 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13138 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13139 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13140 } IEM_MC_NATIVE_ENDIF();
13141 IEM_MC_ADVANCE_RIP_AND_FINISH();
13142 IEM_MC_END();
13143 }
13144 else
13145 IEMOP_RAISE_INVALID_OPCODE_RET();
13146}
13147
13148
13149/* Opcode 0xf3 0x0f 0xd7 - invalid */
13150/* Opcode 0xf2 0x0f 0xd7 - invalid */
13151
13152
13153/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13154FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13155{
13156 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13157 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusb_u64);
13158}
13159
13160
13161/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13162FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13163{
13164 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13165 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusb_u128);
13166}
13167
13168
13169/* Opcode 0xf3 0x0f 0xd8 - invalid */
13170/* Opcode 0xf2 0x0f 0xd8 - invalid */
13171
13172/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13173FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13174{
13175 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13176 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubusw_u64);
13177}
13178
13179
13180/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13181FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13182{
13183 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13184 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubusw_u128);
13185}
13186
13187
13188/* Opcode 0xf3 0x0f 0xd9 - invalid */
13189/* Opcode 0xf2 0x0f 0xd9 - invalid */
13190
13191/** Opcode 0x0f 0xda - pminub Pq, Qq */
13192FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13193{
13194 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13195 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminub_u64);
13196}
13197
13198
13199/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13200FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13201{
13202 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13203 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminub_u128);
13204}
13205
13206/* Opcode 0xf3 0x0f 0xda - invalid */
13207/* Opcode 0xf2 0x0f 0xda - invalid */
13208
13209/** Opcode 0x0f 0xdb - pand Pq, Qq */
13210FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13211{
13212 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13213 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pand_u64);
13214}
13215
13216
13217/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13218FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13219{
13220 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13221 SSE2_OPT_BODY_FullFull_To_Full(pand, iemAImpl_pand_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13222}
13223
13224
13225/* Opcode 0xf3 0x0f 0xdb - invalid */
13226/* Opcode 0xf2 0x0f 0xdb - invalid */
13227
13228/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13229FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13230{
13231 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13232 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusb_u64);
13233}
13234
13235
13236/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13237FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13238{
13239 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13240 SSE2_OPT_BODY_FullFull_To_Full(paddusb, iemAImpl_paddusb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13241}
13242
13243
13244/* Opcode 0xf3 0x0f 0xdc - invalid */
13245/* Opcode 0xf2 0x0f 0xdc - invalid */
13246
13247/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13248FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13249{
13250 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13251 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddusw_u64);
13252}
13253
13254
13255/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13256FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13257{
13258 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13259 SSE2_OPT_BODY_FullFull_To_Full(paddusw, iemAImpl_paddusw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13260}
13261
13262
13263/* Opcode 0xf3 0x0f 0xdd - invalid */
13264/* Opcode 0xf2 0x0f 0xdd - invalid */
13265
13266/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13267FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13268{
13269 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13270 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxub_u64);
13271}
13272
13273
13274/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13275FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13276{
13277 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13278 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxub_u128);
13279}
13280
13281/* Opcode 0xf3 0x0f 0xde - invalid */
13282/* Opcode 0xf2 0x0f 0xde - invalid */
13283
13284
13285/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13286FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13287{
13288 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13289 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pandn_u64);
13290}
13291
13292
13293/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13294FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13295{
13296 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13297 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pandn_u128);
13298}
13299
13300
13301/* Opcode 0xf3 0x0f 0xdf - invalid */
13302/* Opcode 0xf2 0x0f 0xdf - invalid */
13303
13304/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13305FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13306{
13307 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13308 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13309}
13310
13311
13312/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13313FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13314{
13315 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13316 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13317}
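
/* Note! pavgb/pavgw compute a rounding average: each lane is (a + b + 1) / 2
   evaluated with an extra bit of precision so it cannot overflow. Per-byte
   sketch, illustrative only:

        uRes.au8[i] = (uint8_t)(((uint16_t)uDst.au8[i] + uSrc.au8[i] + 1) >> 1);
*/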
13318
13319
13320/* Opcode 0xf3 0x0f 0xe0 - invalid */
13321/* Opcode 0xf2 0x0f 0xe0 - invalid */
13322
13323/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13324FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13325{
13326 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13327 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13328}
13329
13330
13331/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13332FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13333{
13334 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13335 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13336}
13337
13338
13339/* Opcode 0xf3 0x0f 0xe1 - invalid */
13340/* Opcode 0xf2 0x0f 0xe1 - invalid */
13341
13342/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13343FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13344{
13345 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13346 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13347}
13348
13349
13350/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13351FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13352{
13353 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13354 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13355}
13356
13357
13358/* Opcode 0xf3 0x0f 0xe2 - invalid */
13359/* Opcode 0xf2 0x0f 0xe2 - invalid */
13360
13361/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13362FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13363{
13364 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13365 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13366}
13367
13368
13369/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13370FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13371{
13372 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13373 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13374}
13375
13376
13377/* Opcode 0xf3 0x0f 0xe3 - invalid */
13378/* Opcode 0xf2 0x0f 0xe3 - invalid */
13379
13380/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13381FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13382{
13383 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13384 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13385}
13386
13387
13388/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13389FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13390{
13391 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13392 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13393}
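
/* Note! pmulhuw keeps the high half of the 16x16-bit unsigned product
   (pmulhw being the signed counterpart). Per-word sketch, illustrative only:

        uRes.au16[i] = (uint16_t)(((uint32_t)uDst.au16[i] * uSrc.au16[i]) >> 16);
*/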
13394
13395
13396/* Opcode 0xf3 0x0f 0xe4 - invalid */
13397/* Opcode 0xf2 0x0f 0xe4 - invalid */
13398
13399/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13400FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13401{
13402 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13403 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmulhw_u64);
13404}
13405
13406
13407/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13408FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13409{
13410 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13411 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhw_u128);
13412}
13413
13414
13415/* Opcode 0xf3 0x0f 0xe5 - invalid */
13416/* Opcode 0xf2 0x0f 0xe5 - invalid */
13417/* Opcode 0x0f 0xe6 - invalid */
13418
13419
13420/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13421FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13422{
13423 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13424 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13425}
13426
13427
13428/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13429FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13430{
13431 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13432 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13433}
13434
13435
13436/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13437FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13438{
13439 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13440 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13441}
13442
13443
13444/**
13445 * @opcode 0xe7
13446 * @opcodesub !11 mr/reg
13447 * @oppfx none
13448 * @opcpuid sse
13449 * @opgroup og_sse1_cachect
13450 * @opxcpttype none
13451 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13452 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13453 */
13454FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13455{
13456 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13458 if (IEM_IS_MODRM_MEM_MODE(bRm))
13459 {
13460 /* Register, memory. */
13461 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13462 IEM_MC_LOCAL(uint64_t, uSrc);
13463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13464
13465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13467 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13468 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13469 IEM_MC_FPU_TO_MMX_MODE();
13470
13471 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13472 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13473
13474 IEM_MC_ADVANCE_RIP_AND_FINISH();
13475 IEM_MC_END();
13476 }
13477 /**
13478 * @opdone
13479 * @opmnemonic ud0fe7reg
13480 * @opcode 0xe7
13481 * @opcodesub 11 mr/reg
13482 * @oppfx none
13483 * @opunused immediate
13484 * @opcpuid sse
13485 * @optest ->
13486 */
13487 else
13488 IEMOP_RAISE_INVALID_OPCODE_RET();
13489}
13490
13491/**
13492 * @opcode 0xe7
13493 * @opcodesub !11 mr/reg
13494 * @oppfx 0x66
13495 * @opcpuid sse2
13496 * @opgroup og_sse2_cachect
13497 * @opxcpttype 1
13498 * @optest op1=-1 op2=2 -> op1=2
13499 * @optest op1=0 op2=-42 -> op1=-42
13500 */
13501FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13502{
13503 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13505 if (IEM_IS_MODRM_MEM_MODE(bRm))
13506 {
13507 /* Register, memory. */
13508 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13509 IEM_MC_LOCAL(RTUINT128U, uSrc);
13510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13511
13512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13516
13517 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13518 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13519
13520 IEM_MC_ADVANCE_RIP_AND_FINISH();
13521 IEM_MC_END();
13522 }
13523
13524 /**
13525 * @opdone
13526 * @opmnemonic ud660fe7reg
13527 * @opcode 0xe7
13528 * @opcodesub 11 mr/reg
13529 * @oppfx 0x66
13530 * @opunused immediate
13531 * @opcpuid sse
13532 * @optest ->
13533 */
13534 else
13535 IEMOP_RAISE_INVALID_OPCODE_RET();
13536}
13537
13538/* Opcode 0xf3 0x0f 0xe7 - invalid */
13539/* Opcode 0xf2 0x0f 0xe7 - invalid */
13540
13541
13542/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13543FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13544{
13545 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13546 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsb_u64);
13547}
13548
13549
13550/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13551FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13552{
13553 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13554 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsb_u128);
13555}
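
/*
 * Lane semantics sketch for PSUBSB (illustrative only, helper name made
 * up): bytes are subtracted with signed saturation, clamping to the
 * int8_t range instead of wrapping.
 */
#if 0 /* not compiled, sketch only */
static uint8_t iemSketchPsubsbLane(uint8_t bDst, uint8_t bSrc)
{
    int16_t i16 = (int16_t)(int8_t)bDst - (int8_t)bSrc;
    return (uint8_t)(i16 > 127 ? 127 : i16 < -128 ? -128 : i16);
}
#endif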
13556
13557
13558/* Opcode 0xf3 0x0f 0xe8 - invalid */
13559/* Opcode 0xf2 0x0f 0xe8 - invalid */
13560
13561/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13562FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13563{
13564 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13565 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubsw_u64);
13566}
13567
13568
13569/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13570FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13571{
13572 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13573 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psubsw_u128);
13574}
13575
13576
13577/* Opcode 0xf3 0x0f 0xe9 - invalid */
13578/* Opcode 0xf2 0x0f 0xe9 - invalid */
13579
13580
13581/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13582FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13583{
13584 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13585 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pminsw_u64);
13586}
13587
13588
13589/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13590FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13591{
13592 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13593 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pminsw_u128);
13594}
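
/*
 * Lane semantics sketch for PMINSW (illustrative only): a per-lane signed
 * 16-bit minimum, i.e. the raw bit patterns are compared as int16_t.
 */
#if 0 /* not compiled, sketch only */
static uint16_t iemSketchPminswLane(uint16_t uDst, uint16_t uSrc)
{
    return (int16_t)uDst < (int16_t)uSrc ? uDst : uSrc;
}
#endif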
13595
13596
13597/* Opcode 0xf3 0x0f 0xea - invalid */
13598/* Opcode 0xf2 0x0f 0xea - invalid */
13599
13600
13601/** Opcode 0x0f 0xeb - por Pq, Qq */
13602FNIEMOP_DEF(iemOp_por_Pq_Qq)
13603{
13604 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13605 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_por_u64);
13606}
13607
13608
13609/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13610FNIEMOP_DEF(iemOp_por_Vx_Wx)
13611{
13612 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13613 SSE2_OPT_BODY_FullFull_To_Full(por, iemAImpl_por_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13614}
13615
13616
13617/* Opcode 0xf3 0x0f 0xeb - invalid */
13618/* Opcode 0xf2 0x0f 0xeb - invalid */
13619
13620/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13621FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13622{
13623 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13624 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsb_u64);
13625}
13626
13627
13628/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13629FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13630{
13631 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13632 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsb_u128);
13633}
13634
13635
13636/* Opcode 0xf3 0x0f 0xec - invalid */
13637/* Opcode 0xf2 0x0f 0xec - invalid */
13638
13639/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13640FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13641{
13642 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13643 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddsw_u64);
13644}
13645
13646
13647/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13648FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13649{
13650 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13651 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_paddsw_u128);
13652}
13653
13654
13655/* Opcode 0xf3 0x0f 0xed - invalid */
13656/* Opcode 0xf2 0x0f 0xed - invalid */
13657
13658
13659/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13660FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13661{
13662 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13663 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13664}
13665
13666
13667/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13668FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13669{
13670 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13671 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13672}
13673
13674
13675/* Opcode 0xf3 0x0f 0xee - invalid */
13676/* Opcode 0xf2 0x0f 0xee - invalid */
13677
13678
13679/** Opcode 0x0f 0xef - pxor Pq, Qq */
13680FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13681{
13682 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13683 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
13684}
13685
13686
13687/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13688FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13689{
13690 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13691 SSE2_OPT_BODY_FullFull_To_Full(pxor, iemAImpl_pxor_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13692}
13693
13694
13695/* Opcode 0xf3 0x0f 0xef - invalid */
13696/* Opcode 0xf2 0x0f 0xef - invalid */
13697
13698/* Opcode 0x0f 0xf0 - invalid */
13699/* Opcode 0x66 0x0f 0xf0 - invalid */
13700
13701
13702/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13703FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13704{
13705 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13707 if (IEM_IS_MODRM_REG_MODE(bRm))
13708 {
13709 /*
13710 * Register, register - (not implemented, assuming it raises \#UD).
13711 */
13712 IEMOP_RAISE_INVALID_OPCODE_RET();
13713 }
13714 else
13715 {
13716 /*
13717 * Register, memory.
13718 */
13719 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13720 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13722
13723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13725 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13726 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13727 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13728 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13729
13730 IEM_MC_ADVANCE_RIP_AND_FINISH();
13731 IEM_MC_END();
13732 }
13733}
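
/*
 * LDDQU is the SSE3 "load 16 bytes, never fault on misalignment" form,
 * which is why the worker uses IEM_MC_FETCH_MEM_U128_NO_AC instead of an
 * aligning fetch.  Guest-side usage sketch (illustrative only):
 */
#if 0 /* not compiled, sketch only */
#include <pmmintrin.h>
static __m128i iemSketchLoadUnaligned(void const *pv)
{
    return _mm_lddqu_si128((__m128i const *)pv); /* emits lddqu when built with -msse3 */
}
#endif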
13734
13735
13736/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13737FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13738{
13739 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13740 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13741}
13742
13743
13744/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13745FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13746{
13747 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13748 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13749}
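
/*
 * Shift count sketch for the PSLLx family (illustrative only): the count
 * is the full low 64 bits of the source operand and is not masked, so any
 * count larger than the lane width zeroes the lane.
 */
#if 0 /* not compiled, sketch only */
static uint16_t iemSketchPsllwLane(uint16_t uLane, uint64_t uCount)
{
    return uCount > 15 ? 0 : (uint16_t)(uLane << uCount);
}
#endif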
13750
13751
13752/* Opcode 0xf2 0x0f 0xf1 - invalid */
13753
13754/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13755FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13756{
13757 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13758 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13759}
13760
13761
13762/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13763FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13764{
13765 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13766 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13767}
13768
13769
13770/* Opcode 0xf2 0x0f 0xf2 - invalid */
13771
13772/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13773FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13774{
13775 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13776 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13777}
13778
13779
13780/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13781FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13782{
13783 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13784 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13785}
13786
13787/* Opcode 0xf2 0x0f 0xf3 - invalid */
13788
13789/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13790FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13791{
13792 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13793 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64); /* the MMX form was introduced with SSE2, cf. psubq */
13794}
13795
13796
13797/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13798FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13799{
13800 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13801 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmuludq_u128);
13802}
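
/*
 * Semantics sketch for PMULUDQ (illustrative only): an unsigned widening
 * multiply of the low dword of each 64-bit lane; the high dwords of both
 * sources are ignored and the product fills the whole lane.
 */
#if 0 /* not compiled, sketch only */
static uint64_t iemSketchPmuludqLane(uint64_t uDst, uint64_t uSrc)
{
    return (uint64_t)(uint32_t)uDst * (uint32_t)uSrc;
}
#endif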
13803
13804
13805/* Opcode 0xf2 0x0f 0xf4 - invalid */
13806
13807/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13808FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13809{
13810 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13811 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13812}
13813
13814
13815/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13816FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13817{
13818 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13819 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13820}
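
/*
 * Semantics sketch for PMADDWD (illustrative only): each result dword is
 * the sum of two adjacent signed 16x16->32 products.
 */
#if 0 /* not compiled, sketch only */
static uint32_t iemSketchPmaddwdLane(uint16_t uDst0, uint16_t uDst1, uint16_t uSrc0, uint16_t uSrc1)
{
    return (uint32_t)(  (int32_t)(int16_t)uDst0 * (int16_t)uSrc0
                      + (int32_t)(int16_t)uDst1 * (int16_t)uSrc1);
}
#endif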
13821
13822/* Opcode 0xf2 0x0f 0xf5 - invalid */
13823
13824/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13825FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13826{
13827 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13828 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13829}
13830
13831
13832/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13833FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13834{
13835 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13836 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13837}
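
/*
 * Semantics sketch for PSADBW (illustrative only): the absolute
 * differences of eight unsigned byte pairs are summed into a single
 * 16-bit value in the low word of the destination lane; the remaining
 * bits are zeroed.
 */
#if 0 /* not compiled, sketch only */
static uint64_t iemSketchPsadbw(uint64_t uDst, uint64_t uSrc)
{
    uint16_t uSum = 0;
    for (unsigned i = 0; i < 8; i++)
    {
        uint8_t b1 = (uint8_t)(uDst >> (i * 8));
        uint8_t b2 = (uint8_t)(uSrc >> (i * 8));
        uSum += (uint16_t)(b1 >= b2 ? b1 - b2 : b2 - b1);
    }
    return uSum;
}
#endif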
13838
13839
13840/* Opcode 0xf2 0x0f 0xf6 - invalid */
13841
13842/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13843FNIEMOP_DEF(iemOp_maskmovq_Pq_Nq)
13844{
13845// IEMOP_MNEMONIC2(RM, MASKMOVQ, maskmovq, Pq, Nq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13847 if (IEM_IS_MODRM_REG_MODE(bRm))
13848 {
13849 /*
13850 * MMX, MMX, (implicit) [ ER]DI
13851 */
13852 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13854 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13855 IEM_MC_LOCAL( uint64_t, u64Mem);
13856 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Mem, u64Mem, 0);
13857 IEM_MC_ARG( uint64_t const *, puSrc, 1);
13858 IEM_MC_ARG( uint64_t const *, puMsk, 2);
13859 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13860 IEM_MC_PREPARE_FPU_USAGE();
13861 IEM_MC_FPU_TO_MMX_MODE();
13862
13863 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13864 IEM_MC_FETCH_MEM_U64(u64Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13865 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_REG_8(bRm));
13866 IEM_MC_REF_MREG_U64_CONST(puMsk, IEM_GET_MODRM_RM_8(bRm));
13867 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovq_u64, pu64Mem, puSrc, puMsk);
13868 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, u64EffAddr, u64Mem);
13869
13870 IEM_MC_ADVANCE_RIP_AND_FINISH();
13871 IEM_MC_END();
13872 }
13873 else
13874 {
13875 /* The memory, register encoding is invalid. */
13876 IEMOP_RAISE_INVALID_OPCODE_RET();
13877 }
13878}
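
/*
 * Store semantics sketch for MASKMOVQ (illustrative only): only bytes
 * whose mask byte has bit 7 set are written to [ ER]DI; the worker above
 * realizes this as a read-modify-write of the whole qword, which is why
 * u64Mem is fetched before and stored after the AIMPL call.
 */
#if 0 /* not compiled, sketch only */
static uint64_t iemSketchMaskmovqMerge(uint64_t uMem, uint64_t uSrc, uint64_t uMsk)
{
    for (unsigned i = 0; i < 8; i++)
        if (uMsk & (UINT64_C(0x80) << (i * 8)))
        {
            uMem &= ~(UINT64_C(0xff) << (i * 8));
            uMem |= uSrc & (UINT64_C(0xff) << (i * 8));
        }
    return uMem;
}
#endif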
13879
13880
13881/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13882FNIEMOP_DEF(iemOp_maskmovdqu_Vdq_Udq)
13883{
13884// IEMOP_MNEMONIC2(RM, MASKMOVDQU, maskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
13885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13886 if (IEM_IS_MODRM_REG_MODE(bRm))
13887 {
13888 /*
13889 * XMM, XMM, (implicit) [ ER]DI
13890 */
13891 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
13892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13893 IEM_MC_LOCAL( uint64_t, u64EffAddr);
13894 IEM_MC_LOCAL( RTUINT128U, u128Mem);
13895 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
13896 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
13897 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
13898 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13899 IEM_MC_PREPARE_SSE_USAGE();
13900
13901 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
13902 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
13903 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13904 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
13905 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
13906 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
13907
13908 IEM_MC_ADVANCE_RIP_AND_FINISH();
13909 IEM_MC_END();
13910 }
13911 else
13912 {
13913 /* The memory, register encoding is invalid. */
13914 IEMOP_RAISE_INVALID_OPCODE_RET();
13915 }
13916}
13917
13918
13919/* Opcode 0xf2 0x0f 0xf7 - invalid */
13920
13921
13922/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13923FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13924{
13925 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13926 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubb_u64);
13927}
13928
13929
13930/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13931FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13932{
13933 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13934 SSE2_OPT_BODY_FullFull_To_Full(psubb, iemAImpl_psubb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13935}
13936
13937
13938/* Opcode 0xf2 0x0f 0xf8 - invalid */
13939
13940
13941/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13942FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13943{
13944 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13945 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubw_u64);
13946}
13947
13948
13949/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13950FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13951{
13952 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13953 SSE2_OPT_BODY_FullFull_To_Full(psubw, iemAImpl_psubw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13954}
13955
13956
13957/* Opcode 0xf2 0x0f 0xf9 - invalid */
13958
13959
13960/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13961FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13962{
13963 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13964 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psubd_u64);
13965}
13966
13967
13968/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13969FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13970{
13971 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13972 SSE2_OPT_BODY_FullFull_To_Full(psubd, iemAImpl_psubd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13973}
13974
13975
13976/* Opcode 0xf2 0x0f 0xfa - invalid */
13977
13978
13979/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13980FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13981{
13982 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13983 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13984}
13985
13986
13987/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13988FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13989{
13990 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13991 SSE2_OPT_BODY_FullFull_To_Full(psubq, iemAImpl_psubq_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
13992}
13993
13994
13995/* Opcode 0xf2 0x0f 0xfb - invalid */
13996
13997
13998/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13999FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
14000{
14001 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14002 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddb_u64);
14003}
14004
14005
14006/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14007FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14008{
14009 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14010 SSE2_OPT_BODY_FullFull_To_Full(paddb, iemAImpl_paddb_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14011}
14012
14013
14014/* Opcode 0xf2 0x0f 0xfc - invalid */
14015
14016
14017/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14018FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14019{
14020 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14021 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddw_u64);
14022}
14023
14024
14025/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14026FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14027{
14028 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14029 SSE2_OPT_BODY_FullFull_To_Full(paddw, iemAImpl_paddw_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14030}
14031
14032
14033/* Opcode 0xf2 0x0f 0xfd - invalid */
14034
14035
14036/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14037FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14038{
14039 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14040 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_paddd_u64);
14041}
14042
14043
14044/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14045FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14046{
14047 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14048 SSE2_OPT_BODY_FullFull_To_Full(paddd, iemAImpl_paddd_u128, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
14049}
14050
14051
14052/* Opcode 0xf2 0x0f 0xfe - invalid */
14053
14054
14055/** Opcode 0x0f 0xff (all prefixes) - UD0 */
14056FNIEMOP_DEF(iemOp_ud0)
14057{
14058 IEMOP_MNEMONIC(ud0, "ud0");
14059 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14060 {
14061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14062 if (IEM_IS_MODRM_MEM_MODE(bRm))
14063 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14064 }
14065 IEMOP_HLP_DONE_DECODING();
14066 IEMOP_RAISE_INVALID_OPCODE_RET();
14067}
14068
14069
14070
14071/**
14072 * Two byte opcode map, first byte 0x0f.
14073 *
14074 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14075 * check if it needs updating as well when making changes.
14076 */
14077const PFNIEMOP g_apfnTwoByteMap[] =
14078{
14079 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14080 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14081 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14082 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14083 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14084 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14085 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14086 /* 0x06 */ IEMOP_X4(iemOp_clts),
14087 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14088 /* 0x08 */ IEMOP_X4(iemOp_invd),
14089 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14090 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14091 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14092 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14093 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14094 /* 0x0e */ IEMOP_X4(iemOp_femms),
14095 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14096
14097 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14098 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14099 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14100 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14104 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14105 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14106 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14107 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14108 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14109 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14110 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14111 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14112 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14113
14114 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14115 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14116 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14117 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14118 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14119 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14120 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14121 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14122 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14125 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14126 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14127 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14128 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14130
14131 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14132 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14133 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14134 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14135 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14136 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14137 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14138 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14139 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14140 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14141 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14142 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14143 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14144 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14145 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14146 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14147
14148 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14149 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14150 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14151 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14152 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14153 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14154 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14155 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14156 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14157 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14158 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14159 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14160 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14161 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14162 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14163 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14164
14165 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14166 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14167 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14168 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14169 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14173 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14174 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14175 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14176 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14177 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14178 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14179 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14180 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14181
14182 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14188 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14197 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14198
14199 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14200 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14201 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14202 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14203 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14204 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14205 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14206 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14207
14208 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14209 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14210 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14211 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14212 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14213 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14214 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14215 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14216
14217 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14218 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14219 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14220 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14221 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14222 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14223 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14224 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14225 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14226 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14227 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14228 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14229 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14230 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14231 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14232 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14233
14234 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14235 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14236 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14237 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14238 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14239 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14240 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14241 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14242 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14243 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14244 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14245 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14246 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14247 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14248 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14249 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14250
14251 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14252 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14253 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14254 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14255 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14256 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14257 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14258 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14259 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14260 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14261 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14262 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14263 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14264 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14265 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14266 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14267
14268 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14269 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14270 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14271 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14272 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14273 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14274 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14275 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14276 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14277 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14278 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14279 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14280 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14281 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14282 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14283 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14284
14285 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14286 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14287 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14288 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14289 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14290 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14291 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14292 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14293 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14294 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14295 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14296 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14297 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14298 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14299 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14300 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14301
14302 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14303 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14309 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318
14319 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14326 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14335
14336 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14337 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14342 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14343 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14345 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14346 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14347 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14348 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14349 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14350 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14351 /* 0xff */ IEMOP_X4(iemOp_ud0),
14352};
14353AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
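
/*
 * Dispatch sketch (illustrative only; the real lookup lives in the
 * decoder loop): the map holds four entries per opcode byte, one per
 * mandatory prefix column in the order none, 066h, 0f3h, 0f2h, giving
 * the 256 * 4 = 1024 elements asserted above.
 */
#if 0 /* not compiled, sketch only */
static PFNIEMOP iemSketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=066h, 2=0f3h, 3=0f2h */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif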
14354
14355/** @} */
14356