VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 95442

Last change on this file since 95442 was 95441, checked in by vboxsync, 3 years ago

VMM/IEM: [v]andps, [v]andpd, [v]pand, [v]andnps, [v]andnpd, [v]pandn, [v]orps, [v]orpd, and [v]por. bugref:9898

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 95441 2022-06-29 22:40:14Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *     pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
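
/*
 * Usage sketch (annotation, not part of the original file): a concrete
 * opcode handler is expected to forward to the worker above together with
 * an IEMOPMEDIAF2 function table. The handler and table names below follow
 * the usual IEM naming pattern but are assumptions here.
 *
 *     FNIEMOP_DEF(iemOp_pand_Pq_Qq)   // 0x0f 0xdb, no prefix
 *     {
 *         IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pand);
 *     }
 */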


/**
 * Common worker for SSE2 instructions of the form:
 *     pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
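
/*
 * Usage sketch (annotation, mirroring the MMX example above; names are
 * assumptions): the 0x66-prefixed form of the same opcode reuses the very
 * same function table via the SSE2 worker, which picks the pfnU128 member.
 *
 *     FNIEMOP_DEF(iemOp_pand_Vx_Wx)   // 0x66 0x0f 0xdb
 *     {
 *         IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
 *     }
 */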


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for Grp6 /4 (verr) and /5 (verw), opcode 0x0f 0x00. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
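
/*
 * Worked decode example (annotation, not original code): how the dispatcher
 * above splits a ModR/M byte, here 0xd0 following 0x0f 0x00 ("lldt ax"):
 *
 *     uint8_t const bRm = 0xd0;
 *     unsigned uMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  // 3 -> register operand
 *     unsigned uReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // 2 -> g_apfnGroup6[2] = iemOp_Grp6_lldt
 *     unsigned uRm  = bRm & X86_MODRM_RM_MASK;                            // 0 -> AX
 */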


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of the instruction used; if a
             hypercall isn't handled by GIM or HMSvm, a #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
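
/*
 * Behavioural sketch (annotation; architectural XGETBV semantics, not a
 * statement about iemCImpl_xgetbv internals): the XCR selected by ECX is
 * returned in EDX:EAX, e.g. XCR0 for ECX=0.
 *
 *     xor ecx, ecx
 *     xgetbv              ; EDX:EAX = XCR0 (enabled XSAVE state components)
 */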


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of the instruction used; if a
             hypercall isn't handled by GIM or HMSvm, a #UD is raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
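
/*
 * Semantic model (annotation; architectural LMSW behaviour, hypothetical
 * variable names): only CR0.PE, CR0.MP, CR0.EM and CR0.TS can be changed,
 * and LMSW cannot clear an already-set PE.
 *
 *     uint64_t const fMsw    = X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS;
 *     uint64_t       uNewCr0 = (uOldCr0 & ~fMsw) | (u16Msw & fMsw) | (uOldCr0 & X86_CR0_PE);
 */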


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
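
/*
 * Worked decode examples (annotation) for the dispatcher above:
 *
 *     0f 01 d9    vmmcall      ; mod=3, reg=3, rm=1 -> register path, case 3/1
 *     0f 01 10    lgdt [eax]   ; mod=0, reg=2       -> g_apfnGroup7Mem[2]
 */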

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
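
/*
 * Register indexing sketch (annotation): judging from the usage above,
 * uRexReg/uRexB carry the REX.R/REX.B bits already shifted into bit 3, so
 * ORing them onto the 3-bit ModR/M fields yields the full 0-15 XMM index in
 * 64-bit mode, e.g. with REX.R=1 and bRm=0xc1:
 *
 *     reg = ((0xc1 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | 8;  // 8 -> xmm8
 *     rm  =  (0xc1 & X86_MODRM_RM_MASK)                           | 0;  // 1 -> xmm1
 */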


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* The register form merges: the high quadword of the destination is preserved. */
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
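
/*
 * Encoding split (annotation): with mod=3 the unprefixed 0x0f 0x12 is
 * MOVHLPS, otherwise MOVLPS with a 64-bit memory source:
 *
 *     0f 12 c1    movhlps xmm0, xmm1   ; xmm0[63:0] = xmm1[127:64]
 *     0f 12 06    movlps  xmm0, [esi]  ; xmm0[63:0] = mem64, high half kept
 */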


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
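
/*
 * Semantic sketch of the movsldup helper (assumption, derived from the
 * @optest values above): the even dword of each qword is duplicated upwards.
 *
 *     void sketchMovSlDup(RTUINT128U *pDst, PCRTUINT128U pSrc)
 *     {
 *         pDst->au32[0] = pSrc->au32[0];
 *         pDst->au32[1] = pSrc->au32[0];
 *         pDst->au32[2] = pSrc->au32[2];
 *         pDst->au32[3] = pSrc->au32[2];
 *     }
 */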


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
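
/*
 * Semantic sketch of the movddup helper (assumption, derived from the
 * @optest values above): the low qword is duplicated into both halves.
 *
 *     void sketchMovDDup(RTUINT128U *pDst, uint64_t uSrc)
 *     {
 *         pDst->au64[0] = uSrc;
 *         pDst->au64[1] = uSrc;
 *     }
 */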


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */
1866
1867FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1868{
1869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1870 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1871 {
1872 /**
1873 * @opcode 0x16
1874 * @opcodesub 11 mr/reg
1875 * @oppfx none
1876 * @opcpuid sse
1877 * @opgroup og_sse_simdfp_datamove
1878 * @opxcpttype 5
1879 * @optest op1=1 op2=2 -> op1=2
1880 * @optest op1=0 op2=-42 -> op1=-42
1881 */
1882 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1883
1884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1885 IEM_MC_BEGIN(0, 1);
1886 IEM_MC_LOCAL(uint64_t, uSrc);
1887
1888 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1889 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1890 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1891 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1892
1893 IEM_MC_ADVANCE_RIP();
1894 IEM_MC_END();
1895 }
1896 else
1897 {
1898 /**
1899 * @opdone
1900 * @opcode 0x16
1901 * @opcodesub !11 mr/reg
1902 * @oppfx none
1903 * @opcpuid sse
1904 * @opgroup og_sse_simdfp_datamove
1905 * @opxcpttype 5
1906 * @optest op1=1 op2=2 -> op1=2
1907 * @optest op1=0 op2=-42 -> op1=-42
1908 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1909 */
1910 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1911
1912 IEM_MC_BEGIN(0, 2);
1913 IEM_MC_LOCAL(uint64_t, uSrc);
1914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1915
1916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1918 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1919 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1920
1921 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1922 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1923
1924 IEM_MC_ADVANCE_RIP();
1925 IEM_MC_END();
1926 }
1927 return VINF_SUCCESS;
1928}
1929
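/*
 * Note on the mod split above: opcode 0x0f 0x16 decodes as movlhps for
 * register operands and movhps for memory operands. Illustrative
 * encodings (hand assembled):
 *
 *      0f 16 c1                movlhps xmm0, xmm1      ; mod = 11b
 *      0f 16 06                movhps  xmm0, [esi]     ; mod = 00b
 */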
1930
1931/**
1932 * @opcode 0x16
1933 * @opcodesub !11 mr/reg
1934 * @oppfx 0x66
1935 * @opcpuid sse2
1936 * @opgroup og_sse2_pcksclr_datamove
1937 * @opxcpttype 5
1938 * @optest op1=1 op2=2 -> op1=2
1939 * @optest op1=0 op2=-42 -> op1=-42
1940 */
1941FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1942{
1943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1944 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1945 {
1946 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1947 IEM_MC_BEGIN(0, 2);
1948 IEM_MC_LOCAL(uint64_t, uSrc);
1949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1950
1951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1955
1956 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1957 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1958
1959 IEM_MC_ADVANCE_RIP();
1960 IEM_MC_END();
1961 return VINF_SUCCESS;
1962 }
1963
1964 /**
1965 * @opdone
1966 * @opmnemonic ud660f16m3
1967 * @opcode 0x16
1968 * @opcodesub 11 mr/reg
1969 * @oppfx 0x66
1970 * @opunused immediate
1971 * @opcpuid sse
1972 * @optest ->
1973 */
1974 return IEMOP_RAISE_INVALID_OPCODE();
1975}
1976
1977
1978/**
1979 * @opcode 0x16
1980 * @oppfx 0xf3
1981 * @opcpuid sse3
1982 * @opgroup og_sse3_pcksclr_datamove
1983 * @opxcpttype 4
1984 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1985 * op1=0x00000002000000020000000100000001
1986 */
1987FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1988{
1989 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994 * Register, register.
1995 */
1996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1997 IEM_MC_BEGIN(2, 0);
1998 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1999 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2000
2001 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2002 IEM_MC_PREPARE_SSE_USAGE();
2003
2004 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2005 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2006 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2007
2008 IEM_MC_ADVANCE_RIP();
2009 IEM_MC_END();
2010 }
2011 else
2012 {
2013 /*
2014 * Register, memory.
2015 */
2016 IEM_MC_BEGIN(2, 2);
2017 IEM_MC_LOCAL(RTUINT128U, uSrc);
2018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2019 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2020 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2021
2022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2024 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2025 IEM_MC_PREPARE_SSE_USAGE();
2026
2027 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2028 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2029 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2030
2031 IEM_MC_ADVANCE_RIP();
2032 IEM_MC_END();
2033 }
2034 return VINF_SUCCESS;
2035}
2036
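/*
 * movshdup duplicates the two odd-numbered source dwords:
 *
 *      src = { d0, d1, d2, d3 }  ->  dst = { d1, d1, d3, d3 }
 *
 * which is exactly what the @optest vector above encodes.
 */
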
2037/**
2038 * @opdone
2039 * @opmnemonic udf20f16
2040 * @opcode 0x16
2041 * @oppfx 0xf2
2042 * @opunused intel-modrm
2043 * @opcpuid sse
2044 * @optest ->
2045 * @opdone
2046 */
2047
2048
2049/**
2050 * @opcode 0x17
2051 * @opcodesub !11 mr/reg
2052 * @oppfx none
2053 * @opcpuid sse
2054 * @opgroup og_sse_simdfp_datamove
2055 * @opxcpttype 5
2056 * @optest op1=1 op2=2 -> op1=2
2057 * @optest op1=0 op2=-42 -> op1=-42
2058 */
2059FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2060{
2061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2062 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2063 {
2064 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2065
2066 IEM_MC_BEGIN(0, 2);
2067 IEM_MC_LOCAL(uint64_t, uSrc);
2068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2069
2070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2072 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2073 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2074
2075 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2076 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2077
2078 IEM_MC_ADVANCE_RIP();
2079 IEM_MC_END();
2080 return VINF_SUCCESS;
2081 }
2082
2083 /**
2084 * @opdone
2085 * @opmnemonic ud0f17m3
2086 * @opcode 0x17
2087 * @opcodesub 11 mr/reg
2088 * @oppfx none
2089 * @opunused immediate
2090 * @opcpuid sse
2091 * @optest ->
2092 */
2093 return IEMOP_RAISE_INVALID_OPCODE();
2094}
2095
2096
2097/**
2098 * @opcode 0x17
2099 * @opcodesub !11 mr/reg
2100 * @oppfx 0x66
2101 * @opcpuid sse2
2102 * @opgroup og_sse2_pcksclr_datamove
2103 * @opxcpttype 5
2104 * @optest op1=1 op2=2 -> op1=2
2105 * @optest op1=0 op2=-42 -> op1=-42
2106 */
2107FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2108{
2109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2110 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2111 {
2112 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2113
2114 IEM_MC_BEGIN(0, 2);
2115 IEM_MC_LOCAL(uint64_t, uSrc);
2116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2117
2118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2122
2123 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2124 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2125
2126 IEM_MC_ADVANCE_RIP();
2127 IEM_MC_END();
2128 return VINF_SUCCESS;
2129 }
2130
2131 /**
2132 * @opdone
2133 * @opmnemonic ud660f17m3
2134 * @opcode 0x17
2135 * @opcodesub 11 mr/reg
2136 * @oppfx 0x66
2137 * @opunused immediate
2138 * @opcpuid sse
2139 * @optest ->
2140 */
2141 return IEMOP_RAISE_INVALID_OPCODE();
2142}
2143
2144
2145/**
2146 * @opdone
2147 * @opmnemonic udf30f17
2148 * @opcode 0x17
2149 * @oppfx 0xf3
2150 * @opunused intel-modrm
2151 * @opcpuid sse
2152 * @optest ->
2153 * @opdone
2154 */
2155
2156/**
2157 * @opmnemonic udf20f17
2158 * @opcode 0x17
2159 * @oppfx 0xf2
2160 * @opunused intel-modrm
2161 * @opcpuid sse
2162 * @optest ->
2163 * @opdone
2164 */
2165
2166
2167/** Opcode 0x0f 0x18. */
2168FNIEMOP_DEF(iemOp_prefetch_Grp16)
2169{
2170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2171 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2172 {
2173 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2174 {
2175 case 4: /* Aliased to /0 for the time being according to AMD. */
2176 case 5: /* Aliased to /0 for the time being according to AMD. */
2177 case 6: /* Aliased to /0 for the time being according to AMD. */
2178 case 7: /* Aliased to /0 for the time being according to AMD. */
2179 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2180 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2181 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2182 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2184 }
2185
2186 IEM_MC_BEGIN(0, 1);
2187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2190 /* Currently a NOP. */
2191 NOREF(GCPtrEffSrc);
2192 IEM_MC_ADVANCE_RIP();
2193 IEM_MC_END();
2194 return VINF_SUCCESS;
2195 }
2196
2197 return IEMOP_RAISE_INVALID_OPCODE();
2198}
2199
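/*
 * Group 16 note: the prefetch hint is selected by the ModRM reg field,
 * e.g. (illustrative encodings):
 *
 *      0f 18 06                prefetchnta byte [esi]  ; /0
 *      0f 18 0e                prefetcht0  byte [esi]  ; /1
 *
 * The emulation above only computes the effective address and otherwise
 * treats every hint as a NOP.
 */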
2200
2201/** Opcode 0x0f 0x19..0x1f. */
2202FNIEMOP_DEF(iemOp_nop_Ev)
2203{
2204 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2207 {
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEM_MC_BEGIN(0, 0);
2210 IEM_MC_ADVANCE_RIP();
2211 IEM_MC_END();
2212 }
2213 else
2214 {
2215 IEM_MC_BEGIN(0, 1);
2216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 /* Currently a NOP. */
2220 NOREF(GCPtrEffSrc);
2221 IEM_MC_ADVANCE_RIP();
2222 IEM_MC_END();
2223 }
2224 return VINF_SUCCESS;
2225}
2226
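/*
 * 0f 1f /0 is the recommended multi-byte NOP encoding, e.g. the common
 * six byte form:
 *
 *      66 0f 1f 44 00 00       nopw [eax+eax*1+0]
 *
 * All of 0x19..0x1f are routed to iemOp_nop_Ev above.
 */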
2227
2228/** Opcode 0x0f 0x20. */
2229FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2230{
2231 /* mod is ignored, as are operand size overrides. */
2232 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2233 IEMOP_HLP_MIN_386();
2234 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2235 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2236 else
2237 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2238
2239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2240 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2241 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2242 {
2243 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2244 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2245 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2246 iCrReg |= 8;
2247 }
2248 switch (iCrReg)
2249 {
2250 case 0: case 2: case 3: case 4: case 8:
2251 break;
2252 default:
2253 return IEMOP_RAISE_INVALID_OPCODE();
2254 }
2255 IEMOP_HLP_DONE_DECODING();
2256
2257 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2258}
2259
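/*
 * The LOCK handling above implements the AMD alias that lets 32-bit code
 * reach CR8, e.g. (illustrative encoding):
 *
 *      f0 0f 20 c0             lock mov eax, cr0       ; reads CR8 when
 *                                                      ; fMovCr8In32Bit,
 *                                                      ; #UD otherwise
 */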
2260
2261/** Opcode 0x0f 0x21. */
2262FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2263{
2264 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2265 IEMOP_HLP_MIN_386();
2266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2268 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2269 return IEMOP_RAISE_INVALID_OPCODE();
2270 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2271 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2272 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2273}
2274
2275
2276/** Opcode 0x0f 0x22. */
2277FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2278{
2279 /* mod is ignored, as are operand size overrides. */
2280 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2281 IEMOP_HLP_MIN_386();
2282 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2283 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2284 else
2285 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2286
2287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2288 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2290 {
2291 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2292 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2293 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2294 iCrReg |= 8;
2295 }
2296 switch (iCrReg)
2297 {
2298 case 0: case 2: case 3: case 4: case 8:
2299 break;
2300 default:
2301 return IEMOP_RAISE_INVALID_OPCODE();
2302 }
2303 IEMOP_HLP_DONE_DECODING();
2304
2305 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2306}
2307
2308
2309/** Opcode 0x0f 0x23. */
2310FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2311{
2312 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2313 IEMOP_HLP_MIN_386();
2314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2316 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2317 return IEMOP_RAISE_INVALID_OPCODE();
2318 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2319 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2320 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2321}
2322
2323
2324/** Opcode 0x0f 0x24. */
2325FNIEMOP_DEF(iemOp_mov_Rd_Td)
2326{
2327 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2328 IEMOP_HLP_MIN_386();
2329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2331 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2332 return IEMOP_RAISE_INVALID_OPCODE();
2333 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2334 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2335 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2336}
2337
2338
2339/** Opcode 0x0f 0x26. */
2340FNIEMOP_DEF(iemOp_mov_Td_Rd)
2341{
2342 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2343 IEMOP_HLP_MIN_386();
2344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2346 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2347 return IEMOP_RAISE_INVALID_OPCODE();
2348 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2349 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2350 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2351}
2352
2353
2354/**
2355 * @opcode 0x28
2356 * @oppfx none
2357 * @opcpuid sse
2358 * @opgroup og_sse_simdfp_datamove
2359 * @opxcpttype 1
2360 * @optest op1=1 op2=2 -> op1=2
2361 * @optest op1=0 op2=-42 -> op1=-42
2362 */
2363FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2364{
2365 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2368 {
2369 /*
2370 * Register, register.
2371 */
2372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2373 IEM_MC_BEGIN(0, 0);
2374 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2376 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2377 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2378 IEM_MC_ADVANCE_RIP();
2379 IEM_MC_END();
2380 }
2381 else
2382 {
2383 /*
2384 * Register, memory.
2385 */
2386 IEM_MC_BEGIN(0, 2);
2387 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2389
2390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2392 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2393 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2394
2395 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2396 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2397
2398 IEM_MC_ADVANCE_RIP();
2399 IEM_MC_END();
2400 }
2401 return VINF_SUCCESS;
2402}
2403
2404/**
2405 * @opcode 0x28
2406 * @oppfx 66
2407 * @opcpuid sse2
2408 * @opgroup og_sse2_pcksclr_datamove
2409 * @opxcpttype 1
2410 * @optest op1=1 op2=2 -> op1=2
2411 * @optest op1=0 op2=-42 -> op1=-42
2412 */
2413FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2414{
2415 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2417 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2418 {
2419 /*
2420 * Register, register.
2421 */
2422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2423 IEM_MC_BEGIN(0, 0);
2424 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2425 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2426 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2427 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2428 IEM_MC_ADVANCE_RIP();
2429 IEM_MC_END();
2430 }
2431 else
2432 {
2433 /*
2434 * Register, memory.
2435 */
2436 IEM_MC_BEGIN(0, 2);
2437 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2439
2440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2442 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2443 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2444
2445 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2446 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2447
2448 IEM_MC_ADVANCE_RIP();
2449 IEM_MC_END();
2450 }
2451 return VINF_SUCCESS;
2452}
2453
2454/* Opcode 0xf3 0x0f 0x28 - invalid */
2455/* Opcode 0xf2 0x0f 0x28 - invalid */
2456
2457/**
2458 * @opcode 0x29
2459 * @oppfx none
2460 * @opcpuid sse
2461 * @opgroup og_sse_simdfp_datamove
2462 * @opxcpttype 1
2463 * @optest op1=1 op2=2 -> op1=2
2464 * @optest op1=0 op2=-42 -> op1=-42
2465 */
2466FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2467{
2468 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2471 {
2472 /*
2473 * Register, register.
2474 */
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_BEGIN(0, 0);
2477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2478 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2479 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2480 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2481 IEM_MC_ADVANCE_RIP();
2482 IEM_MC_END();
2483 }
2484 else
2485 {
2486 /*
2487 * Memory, register.
2488 */
2489 IEM_MC_BEGIN(0, 2);
2490 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2492
2493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2497
2498 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2499 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2500
2501 IEM_MC_ADVANCE_RIP();
2502 IEM_MC_END();
2503 }
2504 return VINF_SUCCESS;
2505}
2506
2507/**
2508 * @opcode 0x29
2509 * @oppfx 66
2510 * @opcpuid sse2
2511 * @opgroup og_sse2_pcksclr_datamove
2512 * @opxcpttype 1
2513 * @optest op1=1 op2=2 -> op1=2
2514 * @optest op1=0 op2=-42 -> op1=-42
2515 */
2516FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2517{
2518 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2521 {
2522 /*
2523 * Register, register.
2524 */
2525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2526 IEM_MC_BEGIN(0, 0);
2527 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2529 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2530 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2531 IEM_MC_ADVANCE_RIP();
2532 IEM_MC_END();
2533 }
2534 else
2535 {
2536 /*
2537 * Memory, register.
2538 */
2539 IEM_MC_BEGIN(0, 2);
2540 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2542
2543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2545 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2547
2548 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2550
2551 IEM_MC_ADVANCE_RIP();
2552 IEM_MC_END();
2553 }
2554 return VINF_SUCCESS;
2555}
2556
2557/* Opcode 0xf3 0x0f 0x29 - invalid */
2558/* Opcode 0xf2 0x0f 0x29 - invalid */
2559
2560
2561/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2562FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2563/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2564FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2565/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2566FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2567/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2568FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2569
2570
2571/**
2572 * @opcode 0x2b
2573 * @opcodesub !11 mr/reg
2574 * @oppfx none
2575 * @opcpuid sse
2576 * @opgroup og_sse1_cachect
2577 * @opxcpttype 1
2578 * @optest op1=1 op2=2 -> op1=2
2579 * @optest op1=0 op2=-42 -> op1=-42
2580 */
2581FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2582{
2583 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2585 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * memory, register.
2589 */
2590 IEM_MC_BEGIN(0, 2);
2591 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2593
2594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2596 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2597 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2598
2599 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2600 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2601
2602 IEM_MC_ADVANCE_RIP();
2603 IEM_MC_END();
2604 }
2605 /* The register, register encoding is invalid. */
2606 else
2607 return IEMOP_RAISE_INVALID_OPCODE();
2608 return VINF_SUCCESS;
2609}
2610
2611/**
2612 * @opcode 0x2b
2613 * @opcodesub !11 mr/reg
2614 * @oppfx 0x66
2615 * @opcpuid sse2
2616 * @opgroup og_sse2_cachect
2617 * @opxcpttype 1
2618 * @optest op1=1 op2=2 -> op1=2
2619 * @optest op1=0 op2=-42 -> op1=-42
2620 */
2621FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2622{
2623 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2625 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2626 {
2627 /*
2628 * memory, register.
2629 */
2630 IEM_MC_BEGIN(0, 2);
2631 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2633
2634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638
2639 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2640 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2641
2642 IEM_MC_ADVANCE_RIP();
2643 IEM_MC_END();
2644 }
2645 /* The register, register encoding is invalid. */
2646 else
2647 return IEMOP_RAISE_INVALID_OPCODE();
2648 return VINF_SUCCESS;
2649}
2650/* Opcode 0xf3 0x0f 0x2b - invalid */
2651/* Opcode 0xf2 0x0f 0x2b - invalid */
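/*
 * movntps/movntpd are store-only: the register forms are invalid and the
 * memory operand must be 16 byte aligned, hence the _ALIGN_SSE store
 * above. The non-temporal hint itself is not modelled here; the store is
 * performed like an ordinary one.
 */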
2652
2653
2654/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2655FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2656/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2657FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2658/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2659FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2660/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2661FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2662
2663/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2664FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2665/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2666FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2667/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2668FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2669/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2670FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2671
2672/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2673FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2674/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2675FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2676/* Opcode 0xf3 0x0f 0x2e - invalid */
2677/* Opcode 0xf2 0x0f 0x2e - invalid */
2678
2679/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2680FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2681/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2682FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2683/* Opcode 0xf3 0x0f 0x2f - invalid */
2684/* Opcode 0xf2 0x0f 0x2f - invalid */
2685
2686/** Opcode 0x0f 0x30. */
2687FNIEMOP_DEF(iemOp_wrmsr)
2688{
2689 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2692}
2693
2694
2695/** Opcode 0x0f 0x31. */
2696FNIEMOP_DEF(iemOp_rdtsc)
2697{
2698 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2701}
2702
2703
2704/** Opcode 0x0f 0x32. */
2705FNIEMOP_DEF(iemOp_rdmsr)
2706{
2707 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2710}
2711
2712
2713/** Opcode 0x0f 0x33. */
2714FNIEMOP_DEF(iemOp_rdpmc)
2715{
2716 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2718 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2719}
2720
2721
2722/** Opcode 0x0f 0x34. */
2723FNIEMOP_DEF(iemOp_sysenter)
2724{
2725 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2727 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
2728}
2729
2730/** Opcode 0x0f 0x35. */
2731FNIEMOP_DEF(iemOp_sysexit)
2732{
2733 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2735 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
2736}
2737
2738/** Opcode 0x0f 0x37. */
2739FNIEMOP_STUB(iemOp_getsec);
2740
2741
2742/** Opcode 0x0f 0x38. */
2743FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2744{
2745#ifdef IEM_WITH_THREE_0F_38
2746 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2747 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2748#else
2749 IEMOP_BITCH_ABOUT_STUB();
2750 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2751#endif
2752}
2753
2754
2755/** Opcode 0x0f 0x3a. */
2756FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2757{
2758#ifdef IEM_WITH_THREE_0F_3A
2759 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2760 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2761#else
2762 IEMOP_BITCH_ABOUT_STUB();
2763 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2764#endif
2765}
2766
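/*
 * Both escape tables above hold four entries per opcode byte, one per
 * mandatory-prefix slot. Assuming the usual IEM prefix indexing
 * (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), the lookup amounts to:
 *
 *      pfn = g_apfnThreeByte0f38[(uintptr_t)b * 4 + idxPrefix];
 */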
2767
2768/**
2769 * Implements a conditional move.
2770 *
2771 * Wish there was an obvious way to do this where we could share and reduce
2772 * code bloat.
2773 *
2774 * @param a_Cnd The conditional "microcode" operation.
2775 */
2776#define CMOV_X(a_Cnd) \
2777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2779 { \
2780 switch (pVCpu->iem.s.enmEffOpSize) \
2781 { \
2782 case IEMMODE_16BIT: \
2783 IEM_MC_BEGIN(0, 1); \
2784 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2785 a_Cnd { \
2786 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2787 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2788 } IEM_MC_ENDIF(); \
2789 IEM_MC_ADVANCE_RIP(); \
2790 IEM_MC_END(); \
2791 return VINF_SUCCESS; \
2792 \
2793 case IEMMODE_32BIT: \
2794 IEM_MC_BEGIN(0, 1); \
2795 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2796 a_Cnd { \
2797 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2798 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2799 } IEM_MC_ELSE() { \
2800 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2801 } IEM_MC_ENDIF(); \
2802 IEM_MC_ADVANCE_RIP(); \
2803 IEM_MC_END(); \
2804 return VINF_SUCCESS; \
2805 \
2806 case IEMMODE_64BIT: \
2807 IEM_MC_BEGIN(0, 1); \
2808 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2809 a_Cnd { \
2810 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2811 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2812 } IEM_MC_ENDIF(); \
2813 IEM_MC_ADVANCE_RIP(); \
2814 IEM_MC_END(); \
2815 return VINF_SUCCESS; \
2816 \
2817 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2818 } \
2819 } \
2820 else \
2821 { \
2822 switch (pVCpu->iem.s.enmEffOpSize) \
2823 { \
2824 case IEMMODE_16BIT: \
2825 IEM_MC_BEGIN(0, 2); \
2826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2827 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2829 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2830 a_Cnd { \
2831 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2832 } IEM_MC_ENDIF(); \
2833 IEM_MC_ADVANCE_RIP(); \
2834 IEM_MC_END(); \
2835 return VINF_SUCCESS; \
2836 \
2837 case IEMMODE_32BIT: \
2838 IEM_MC_BEGIN(0, 2); \
2839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2840 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2842 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2843 a_Cnd { \
2844 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2845 } IEM_MC_ELSE() { \
2846 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2847 } IEM_MC_ENDIF(); \
2848 IEM_MC_ADVANCE_RIP(); \
2849 IEM_MC_END(); \
2850 return VINF_SUCCESS; \
2851 \
2852 case IEMMODE_64BIT: \
2853 IEM_MC_BEGIN(0, 2); \
2854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2855 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2857 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2858 a_Cnd { \
2859 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2860 } IEM_MC_ENDIF(); \
2861 IEM_MC_ADVANCE_RIP(); \
2862 IEM_MC_END(); \
2863 return VINF_SUCCESS; \
2864 \
2865 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2866 } \
2867 } do {} while (0)
2868
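/*
 * Note: a_Cnd expands to one of the IEM_MC_IF_EFL_* micro-ops, so
 * "a_Cnd { ... }" forms a complete IF block which IEM_MC_ELSE() /
 * IEM_MC_ENDIF() close. The 32-bit cases need the ELSE branch because a
 * cmov with a false condition still zeroes bits 63:32 of the destination
 * register in 64-bit mode.
 */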
2869
2870
2871/** Opcode 0x0f 0x40. */
2872FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2873{
2874 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2875 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2876}
2877
2878
2879/** Opcode 0x0f 0x41. */
2880FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2881{
2882 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2883 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2884}
2885
2886
2887/** Opcode 0x0f 0x42. */
2888FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2889{
2890 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2891 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2892}
2893
2894
2895/** Opcode 0x0f 0x43. */
2896FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2897{
2898 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2899 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2900}
2901
2902
2903/** Opcode 0x0f 0x44. */
2904FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2905{
2906 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2907 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2908}
2909
2910
2911/** Opcode 0x0f 0x45. */
2912FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2913{
2914 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2915 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2916}
2917
2918
2919/** Opcode 0x0f 0x46. */
2920FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2921{
2922 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2923 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2924}
2925
2926
2927/** Opcode 0x0f 0x47. */
2928FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2929{
2930 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2931 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2932}
2933
2934
2935/** Opcode 0x0f 0x48. */
2936FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2937{
2938 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2939 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2940}
2941
2942
2943/** Opcode 0x0f 0x49. */
2944FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2945{
2946 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2947 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2948}
2949
2950
2951/** Opcode 0x0f 0x4a. */
2952FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2953{
2954 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2955 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2956}
2957
2958
2959/** Opcode 0x0f 0x4b. */
2960FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2961{
2962 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2963 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2964}
2965
2966
2967/** Opcode 0x0f 0x4c. */
2968FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2969{
2970 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2971 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2972}
2973
2974
2975/** Opcode 0x0f 0x4d. */
2976FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2977{
2978 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2979 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2980}
2981
2982
2983/** Opcode 0x0f 0x4e. */
2984FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2985{
2986 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2987 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2988}
2989
2990
2991/** Opcode 0x0f 0x4f. */
2992FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2993{
2994 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2995 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2996}
2997
2998#undef CMOV_X
2999
3000/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3001FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3002/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3003FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3004/* Opcode 0xf3 0x0f 0x50 - invalid */
3005/* Opcode 0xf2 0x0f 0x50 - invalid */
3006
3007/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3008FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3009/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3010FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3011/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3012FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3013/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3014FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3015
3016/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3017FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3018/* Opcode 0x66 0x0f 0x52 - invalid */
3019/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3020FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3021/* Opcode 0xf2 0x0f 0x52 - invalid */
3022
3023/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3024FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3025/* Opcode 0x66 0x0f 0x53 - invalid */
3026/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3027FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3028/* Opcode 0xf2 0x0f 0x53 - invalid */
3029
3030
3031/** Opcode 0x0f 0x54 - andps Vps, Wps */
3032FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3033{
3034 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3035 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
3036}
3037
3038
3039/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3040FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3041{
3042 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3043 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
3044}
3045
3046
3047/* Opcode 0xf3 0x0f 0x54 - invalid */
3048/* Opcode 0xf2 0x0f 0x54 - invalid */
3049
3050
3051/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3052FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3053{
3054 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3055 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
3056}
3057
3058
3059/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3060FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3061{
3062 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3063 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
3064}
3065
3066
3067/* Opcode 0xf3 0x0f 0x55 - invalid */
3068/* Opcode 0xf2 0x0f 0x55 - invalid */
3069
3070
3071/** Opcode 0x0f 0x56 - orps Vps, Wps */
3072FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3073{
3074 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3075 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
3076}
3077
3078
3079/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3080FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3081{
3082 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3083 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
3084}
3085
3086
3087/* Opcode 0xf3 0x0f 0x56 - invalid */
3088/* Opcode 0xf2 0x0f 0x56 - invalid */
3089
3090
3091/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3092FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3093{
3094 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3095 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
3096}
3097
3098
3099/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3100FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3101{
3102 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3103 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
3104}
3105
3106
3107/* Opcode 0xf3 0x0f 0x57 - invalid */
3108/* Opcode 0xf2 0x0f 0x57 - invalid */
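/*
 * Note that andps/andpd, andnps/andnpd, orps/orpd and xorps/xorpd above
 * all reuse the integer pand/pandn/por/pxor implementations: a 128-bit
 * bitwise operation is the same no matter how the lanes are typed, so
 * only the mnemonics and the CPUID requirements differ.
 */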
3109
3110/** Opcode 0x0f 0x58 - addps Vps, Wps */
3111FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3112/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3113FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3114/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3115FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3116/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3117FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3118
3119/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3120FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3121/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3122FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3123/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3124FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3125/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3126FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3127
3128/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3129FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3130/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3131FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3132/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3133FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3134/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3135FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3136
3137/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3138FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3139/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3140FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3141/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3142FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3143/* Opcode 0xf2 0x0f 0x5b - invalid */
3144
3145/** Opcode 0x0f 0x5c - subps Vps, Wps */
3146FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3147/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3148FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3149/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3150FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3151/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3152FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3153
3154/** Opcode 0x0f 0x5d - minps Vps, Wps */
3155FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3156/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3157FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3158/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3159FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3160/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3161FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3162
3163/** Opcode 0x0f 0x5e - divps Vps, Wps */
3164FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3165/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3166FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3167/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3168FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3169/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3170FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3171
3172/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3173FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3174/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3175FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3176/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3177FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3178/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3179FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3180
3181/**
3182 * Common worker for SSE2 instructions on the forms:
3183 * pxxxx xmm1, xmm2/mem128
3184 *
3185 * The 2nd operand is the first half of a register, which in the memory case
3186 * means a 128-bit aligned 64-bit access; only the low 64 bits of the source
3187 * are used.
3188 *
3189 * Exceptions type 4.
3190 */
3191FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3192{
3193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3194 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3195 {
3196 /*
3197 * Register, register.
3198 */
3199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3200 IEM_MC_BEGIN(2, 0);
3201 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3202 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3203 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3204 IEM_MC_PREPARE_SSE_USAGE();
3205 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3206 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3207 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3208 IEM_MC_ADVANCE_RIP();
3209 IEM_MC_END();
3210 }
3211 else
3212 {
3213 /*
3214 * Register, memory.
3215 */
3216 IEM_MC_BEGIN(2, 2);
3217 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3218 IEM_MC_LOCAL(uint64_t, uSrc);
3219 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3221
3222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3225 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3226
3227 IEM_MC_PREPARE_SSE_USAGE();
3228 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3229 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3230
3231 IEM_MC_ADVANCE_RIP();
3232 IEM_MC_END();
3233 }
3234 return VINF_SUCCESS;
3235}
3236
3237
3238/**
3239 * Common worker for MMX instructions on the forms:
3240 * pxxxx mm1, mm2/mem32
3241 *
3242 * The 2nd operand is the first half of a register, which in the memory
3243 * case means a 32-bit memory access (this is the MMX counterpart of the
3244 * SSE2 worker above).
3245 *
3246 * Exceptions type 4.
3247 */
3248FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3249{
3250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3251 if (!pImpl->pfnU64)
3252 return IEMOP_RAISE_INVALID_OPCODE();
3253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3254 {
3255 /*
3256 * Register, register.
3257 */
3258 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3259 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_BEGIN(2, 0);
3262 IEM_MC_ARG(uint64_t *, pDst, 0);
3263 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3264 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3265 IEM_MC_PREPARE_FPU_USAGE();
3266 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3267 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3268 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3269 IEM_MC_ADVANCE_RIP();
3270 IEM_MC_END();
3271 }
3272 else
3273 {
3274 /*
3275 * Register, memory.
3276 */
3277 IEM_MC_BEGIN(2, 2);
3278 IEM_MC_ARG(uint64_t *, pDst, 0);
3279 IEM_MC_LOCAL(uint32_t, uSrc);
3280 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3282
3283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3286 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3287
3288 IEM_MC_PREPARE_FPU_USAGE();
3289 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3290 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3291
3292 IEM_MC_ADVANCE_RIP();
3293 IEM_MC_END();
3294 }
3295 return VINF_SUCCESS;
3296}
3297
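/*
 * Interleave illustration for the low-half workers (example values):
 *
 *      mm0 = 0x8877665544332211, mm1 = 0xf8f7f6f5f4f3f2f1
 *      punpcklbw mm0, mm1      ; mm0 = 0xf444f333f222f111
 *
 * i.e. the low dwords are merged byte by byte, destination byte first.
 */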
3298
3299/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3300FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3301{
3302 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3303 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3304}
3305
3306/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
3307FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3308{
3309 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3310 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3311}
3312
3313/* Opcode 0xf3 0x0f 0x60 - invalid */
3314
3315
3316/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3317FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3318{
3319 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
3320 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3321}
3322
3323/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3324FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3325{
3326 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3327 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3328}
3329
3330/* Opcode 0xf3 0x0f 0x61 - invalid */
3331
3332
3333/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3334FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3335{
3336 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3337 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3338}
3339
3340/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3341FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3342{
3343 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3344 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3345}
3346
3347/* Opcode 0xf3 0x0f 0x62 - invalid */
3348
3349
3350
3351/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3352FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3353/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3354FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3355/* Opcode 0xf3 0x0f 0x63 - invalid */
3356
3357/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3358FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3359/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3360FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3361/* Opcode 0xf3 0x0f 0x64 - invalid */
3362
3363/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3364FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3365/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3366FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3367/* Opcode 0xf3 0x0f 0x65 - invalid */
3368
3369/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3370FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3371/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3372FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3373/* Opcode 0xf3 0x0f 0x66 - invalid */
3374
3375/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3376FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3377/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3378FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3379/* Opcode 0xf3 0x0f 0x67 - invalid */
3380
3381
3382/**
3383 * Common worker for MMX instructions on the form:
3384 * pxxxx mm1, mm2/mem64
3385 *
3386 * The 2nd operand is the second half of a register, which in the memory case
3387 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3388 * where it may read the full 128 bits or only the upper 64 bits.
3389 *
3390 * Exceptions type 4.
3391 */
3392FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3393{
3394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3395 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3397 {
3398 /*
3399 * Register, register.
3400 */
3401 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3402 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3404 IEM_MC_BEGIN(2, 0);
3405 IEM_MC_ARG(uint64_t *, pDst, 0);
3406 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3407 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3408 IEM_MC_PREPARE_FPU_USAGE();
3409 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3410 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3411 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3412 IEM_MC_ADVANCE_RIP();
3413 IEM_MC_END();
3414 }
3415 else
3416 {
3417 /*
3418 * Register, memory.
3419 */
3420 IEM_MC_BEGIN(2, 2);
3421 IEM_MC_ARG(uint64_t *, pDst, 0);
3422 IEM_MC_LOCAL(uint64_t, uSrc);
3423 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3425
3426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3429 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3430
3431 IEM_MC_PREPARE_FPU_USAGE();
3432 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3433 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3434
3435 IEM_MC_ADVANCE_RIP();
3436 IEM_MC_END();
3437 }
3438 return VINF_SUCCESS;
3439}
3440
3441
3442/**
3443 * Common worker for SSE2 instructions on the form:
3444 * pxxxx xmm1, xmm2/mem128
3445 *
3446 * The 2nd operand is the second half of a register, which in the memory case
3447 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3448 * where it may read the full 128 bits or only the upper 64 bits.
3449 *
3450 * Exceptions type 4.
3451 */
3452FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 {
3457 /*
3458 * Register, register.
3459 */
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 IEM_MC_BEGIN(2, 0);
3462 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3463 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3465 IEM_MC_PREPARE_SSE_USAGE();
3466 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3467 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3468 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /*
3475 * Register, memory.
3476 */
3477 IEM_MC_BEGIN(2, 2);
3478 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3479 IEM_MC_LOCAL(RTUINT128U, uSrc);
3480 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3482
3483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3486 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3487
3488 IEM_MC_PREPARE_SSE_USAGE();
3489 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3490 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3491
3492 IEM_MC_ADVANCE_RIP();
3493 IEM_MC_END();
3494 }
3495 return VINF_SUCCESS;
3496}
3497
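/*
 * High-half counterpart of the low-half example further up (same example
 * values):
 *
 *      mm0 = 0x8877665544332211, mm1 = 0xf8f7f6f5f4f3f2f1
 *      punpckhbw mm0, mm1      ; mm0 = 0xf888f777f666f555
 */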
3498
3499/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3500FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3501{
3502 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3503 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3504}
3505
3506/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3507FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3508{
3509 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3510 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3511}
3512/* Opcode 0xf3 0x0f 0x68 - invalid */
3513
3514
3515/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3516FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3517{
3518 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3519 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3520}
3521
3522/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3523FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3524{
3525 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3526 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3528}
3529/* Opcode 0xf3 0x0f 0x69 - invalid */
3530
3531
3532/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3533FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3534{
3535 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3536 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3537}
3538
3539/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3540FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3541{
3542 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3543 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3544}
3545/* Opcode 0xf3 0x0f 0x6a - invalid */
3546
3547
3548/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3549FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3550/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3551FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3552/* Opcode 0xf3 0x0f 0x6b - invalid */
3553
3554
3555/* Opcode 0x0f 0x6c - invalid */
3556
3557/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3558FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3559{
3560 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3561 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3562}
3563
3564/* Opcode 0xf3 0x0f 0x6c - invalid */
3565/* Opcode 0xf2 0x0f 0x6c - invalid */
3566
3567
3568/* Opcode 0x0f 0x6d - invalid */
3569
3570/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3571FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3572{
3573 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3574 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3575}
3576
3577/* Opcode 0xf3 0x0f 0x6d - invalid */
3578
3579
3580FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3581{
3582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3583 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3584 {
3585 /**
3586 * @opcode 0x6e
3587 * @opcodesub rex.w=1
3588 * @oppfx none
3589 * @opcpuid mmx
3590 * @opgroup og_mmx_datamove
3591 * @opxcpttype 5
3592 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3593 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3594 */
3595 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3597 {
3598 /* MMX, greg64 */
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3600 IEM_MC_BEGIN(0, 1);
3601 IEM_MC_LOCAL(uint64_t, u64Tmp);
3602
3603 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3604 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3605
3606 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3607 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3608 IEM_MC_FPU_TO_MMX_MODE();
3609
3610 IEM_MC_ADVANCE_RIP();
3611 IEM_MC_END();
3612 }
3613 else
3614 {
3615 /* MMX, [mem64] */
3616 IEM_MC_BEGIN(0, 2);
3617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3618 IEM_MC_LOCAL(uint64_t, u64Tmp);
3619
3620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3622 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3623 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3624
3625 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3626 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3627 IEM_MC_FPU_TO_MMX_MODE();
3628
3629 IEM_MC_ADVANCE_RIP();
3630 IEM_MC_END();
3631 }
3632 }
3633 else
3634 {
3635 /**
3636 * @opdone
3637 * @opcode 0x6e
3638 * @opcodesub rex.w=0
3639 * @oppfx none
3640 * @opcpuid mmx
3641 * @opgroup og_mmx_datamove
3642 * @opxcpttype 5
3643 * @opfunction iemOp_movd_q_Pd_Ey
3644 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3645 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3646 */
3647 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3648 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3649 {
3650 /* MMX, greg */
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3652 IEM_MC_BEGIN(0, 1);
3653 IEM_MC_LOCAL(uint64_t, u64Tmp);
3654
3655 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3656 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3657
3658 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3659 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3660 IEM_MC_FPU_TO_MMX_MODE();
3661
3662 IEM_MC_ADVANCE_RIP();
3663 IEM_MC_END();
3664 }
3665 else
3666 {
3667 /* MMX, [mem] */
3668 IEM_MC_BEGIN(0, 2);
3669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3670 IEM_MC_LOCAL(uint32_t, u32Tmp);
3671
3672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3675 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3676
3677 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3678 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3679 IEM_MC_FPU_TO_MMX_MODE();
3680
3681 IEM_MC_ADVANCE_RIP();
3682 IEM_MC_END();
3683 }
3684 }
3685 return VINF_SUCCESS;
3686}
3687
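/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ey.  As with the MMX form above,
 *  REX.W selects between the dword and qword variants. */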
3688FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3689{
3690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3691 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3692 {
3693 /**
3694 * @opcode 0x6e
3695 * @opcodesub rex.w=1
3696 * @oppfx 0x66
3697 * @opcpuid sse2
3698 * @opgroup og_sse2_simdint_datamove
3699 * @opxcpttype 5
3700 * @optest 64-bit / op1=1 op2=2 -> op1=2
3701 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3702 */
3703 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3704 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3705 {
3706 /* XMM, greg64 */
3707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3708 IEM_MC_BEGIN(0, 1);
3709 IEM_MC_LOCAL(uint64_t, u64Tmp);
3710
3711 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3712 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3713
3714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3715 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3716
3717 IEM_MC_ADVANCE_RIP();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /* XMM, [mem64] */
3723 IEM_MC_BEGIN(0, 2);
3724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3725 IEM_MC_LOCAL(uint64_t, u64Tmp);
3726
3727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3729 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3730 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3731
3732 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3733 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3734
3735 IEM_MC_ADVANCE_RIP();
3736 IEM_MC_END();
3737 }
3738 }
3739 else
3740 {
3741 /**
3742 * @opdone
3743 * @opcode 0x6e
3744 * @opcodesub rex.w=0
3745 * @oppfx 0x66
3746 * @opcpuid sse2
3747 * @opgroup og_sse2_simdint_datamove
3748 * @opxcpttype 5
3749 * @opfunction iemOp_movd_q_Vy_Ey
3750 * @optest op1=1 op2=2 -> op1=2
3751 * @optest op1=0 op2=-42 -> op1=-42
3752 */
3753 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3755 {
3756 /* XMM, greg32 */
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3758 IEM_MC_BEGIN(0, 1);
3759 IEM_MC_LOCAL(uint32_t, u32Tmp);
3760
3761 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3762 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3763
3764 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3765 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3766
3767 IEM_MC_ADVANCE_RIP();
3768 IEM_MC_END();
3769 }
3770 else
3771 {
3772 /* XMM, [mem32] */
3773 IEM_MC_BEGIN(0, 2);
3774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3775 IEM_MC_LOCAL(uint32_t, u32Tmp);
3776
3777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3779 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3780 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3781
3782 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3783 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3784
3785 IEM_MC_ADVANCE_RIP();
3786 IEM_MC_END();
3787 }
3788 }
3789 return VINF_SUCCESS;
3790}
3791
3792/* Opcode 0xf3 0x0f 0x6e - invalid */
3793
3794
3795/**
3796 * @opcode 0x6f
3797 * @oppfx none
3798 * @opcpuid mmx
3799 * @opgroup og_mmx_datamove
3800 * @opxcpttype 5
3801 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3802 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3803 */
3804FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3805{
3806 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3808 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3809 {
3810 /*
3811 * Register, register.
3812 */
3813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3814 IEM_MC_BEGIN(0, 1);
3815 IEM_MC_LOCAL(uint64_t, u64Tmp);
3816
3817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3819
3820 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3821 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3822 IEM_MC_FPU_TO_MMX_MODE();
3823
3824 IEM_MC_ADVANCE_RIP();
3825 IEM_MC_END();
3826 }
3827 else
3828 {
3829 /*
3830 * Register, memory.
3831 */
3832 IEM_MC_BEGIN(0, 2);
3833 IEM_MC_LOCAL(uint64_t, u64Tmp);
3834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3835
3836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3838 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3839 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3840
3841 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3842 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3843 IEM_MC_FPU_TO_MMX_MODE();
3844
3845 IEM_MC_ADVANCE_RIP();
3846 IEM_MC_END();
3847 }
3848 return VINF_SUCCESS;
3849}
3850
3851/**
3852 * @opcode 0x6f
3853 * @oppfx 0x66
3854 * @opcpuid sse2
3855 * @opgroup og_sse2_simdint_datamove
3856 * @opxcpttype 1
3857 * @optest op1=1 op2=2 -> op1=2
3858 * @optest op1=0 op2=-42 -> op1=-42
3859 */
3860FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3861{
3862 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3865 {
3866 /*
3867 * Register, register.
3868 */
3869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3870 IEM_MC_BEGIN(0, 0);
3871
3872 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3873 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3874
3875 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3876 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3877 IEM_MC_ADVANCE_RIP();
3878 IEM_MC_END();
3879 }
3880 else
3881 {
3882 /*
3883 * Register, memory.
3884 */
3885 IEM_MC_BEGIN(0, 2);
3886 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3888
3889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3891 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3892 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3893
3894 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3895 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3896
3897 IEM_MC_ADVANCE_RIP();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903/**
3904 * @opcode 0x6f
3905 * @oppfx 0xf3
3906 * @opcpuid sse2
3907 * @opgroup og_sse2_simdint_datamove
3908 * @opxcpttype 4UA
3909 * @optest op1=1 op2=2 -> op1=2
3910 * @optest op1=0 op2=-42 -> op1=-42
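 * @remarks Unlike movdqa (0x66 0x0f 0x6f) this form does not enforce 16-byte
 *          alignment, hence the plain IEM_MC_FETCH_MEM_U128 in the memory
 *          path below.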
3911 */
3912FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3913{
3914 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3917 {
3918 /*
3919 * Register, register.
3920 */
3921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3922 IEM_MC_BEGIN(0, 0);
3923 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3924 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3925 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3926 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3927 IEM_MC_ADVANCE_RIP();
3928 IEM_MC_END();
3929 }
3930 else
3931 {
3932 /*
3933 * Register, memory.
3934 */
3935 IEM_MC_BEGIN(0, 2);
3936 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3938
3939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3941 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3942 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3943 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3944 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3945
3946 IEM_MC_ADVANCE_RIP();
3947 IEM_MC_END();
3948 }
3949 return VINF_SUCCESS;
3950}
3951
3952
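/*
 * Note: for the pshufX variants below the Ib immediate is a shuffle order
 * with two bits per destination element selecting one of four source
 * elements (only within the high/low quadword for pshufhw/pshuflw).  E.g.
 * pshufw mm0, mm1, 0x1b (binary 00 01 10 11) reverses the four words.
 */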
3953/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3954FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3955{
3956 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3959 {
3960 /*
3961 * Register, register.
3962 */
3963 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3965
3966 IEM_MC_BEGIN(3, 0);
3967 IEM_MC_ARG(uint64_t *, pDst, 0);
3968 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3969 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3970 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3971 IEM_MC_PREPARE_FPU_USAGE();
3972 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3973 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3974 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3975 IEM_MC_ADVANCE_RIP();
3976 IEM_MC_END();
3977 }
3978 else
3979 {
3980 /*
3981 * Register, memory.
3982 */
3983 IEM_MC_BEGIN(3, 2);
3984 IEM_MC_ARG(uint64_t *, pDst, 0);
3985 IEM_MC_LOCAL(uint64_t, uSrc);
3986 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3988
3989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3990 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3991 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3993 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3994
3995 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3996 IEM_MC_PREPARE_FPU_USAGE();
3997 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3998 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3999
4000 IEM_MC_ADVANCE_RIP();
4001 IEM_MC_END();
4002 }
4003 return VINF_SUCCESS;
4004}
4005
4006/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4007FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4008{
4009 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
4010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4012 {
4013 /*
4014 * Register, register.
4015 */
4016 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4018
4019 IEM_MC_BEGIN(3, 0);
4020 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4021 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4022 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4023 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4024 IEM_MC_PREPARE_SSE_USAGE();
4025 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4026 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4027 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
4028 IEM_MC_ADVANCE_RIP();
4029 IEM_MC_END();
4030 }
4031 else
4032 {
4033 /*
4034 * Register, memory.
4035 */
4036 IEM_MC_BEGIN(3, 2);
4037 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4038 IEM_MC_LOCAL(RTUINT128U, uSrc);
4039 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4041
4042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4043 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4044 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4047
4048 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4049 IEM_MC_PREPARE_SSE_USAGE();
4050 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4051 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
4052
4053 IEM_MC_ADVANCE_RIP();
4054 IEM_MC_END();
4055 }
4056 return VINF_SUCCESS;
4057}
4058
4059/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4060FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4061{
4062 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
4063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4064 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4065 {
4066 /*
4067 * Register, register.
4068 */
4069 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4071
4072 IEM_MC_BEGIN(3, 0);
4073 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4074 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4075 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4076 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4077 IEM_MC_PREPARE_SSE_USAGE();
4078 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4079 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4080 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
4081 IEM_MC_ADVANCE_RIP();
4082 IEM_MC_END();
4083 }
4084 else
4085 {
4086 /*
4087 * Register, memory.
4088 */
4089 IEM_MC_BEGIN(3, 2);
4090 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4091 IEM_MC_LOCAL(RTUINT128U, uSrc);
4092 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4094
4095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4096 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4097 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4099 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4100
4101 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4102 IEM_MC_PREPARE_SSE_USAGE();
4103 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4104 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
4105
4106 IEM_MC_ADVANCE_RIP();
4107 IEM_MC_END();
4108 }
4109 return VINF_SUCCESS;
4110}
4111
4112/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4113FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4114{
4115 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4118 {
4119 /*
4120 * Register, register.
4121 */
4122 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4124
4125 IEM_MC_BEGIN(3, 0);
4126 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4127 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4128 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4129 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4130 IEM_MC_PREPARE_SSE_USAGE();
4131 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4132 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4133 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4134 IEM_MC_ADVANCE_RIP();
4135 IEM_MC_END();
4136 }
4137 else
4138 {
4139 /*
4140 * Register, memory.
4141 */
4142 IEM_MC_BEGIN(3, 2);
4143 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4144 IEM_MC_LOCAL(RTUINT128U, uSrc);
4145 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4147
4148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4149 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4150 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4152 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4153
4154 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4155 IEM_MC_PREPARE_SSE_USAGE();
4156 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4157 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4158
4159 IEM_MC_ADVANCE_RIP();
4160 IEM_MC_END();
4161 }
4162 return VINF_SUCCESS;
4163}
4164
4165
4166/** Opcode 0x0f 0x71 11/2. */
4167FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4168
4169/** Opcode 0x66 0x0f 0x71 11/2. */
4170FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4171
4172/** Opcode 0x0f 0x71 11/4. */
4173FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4174
4175/** Opcode 0x66 0x0f 0x71 11/4. */
4176FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4177
4178/** Opcode 0x0f 0x71 11/6. */
4179FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4180
4181/** Opcode 0x66 0x0f 0x71 11/6. */
4182FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4183
4184
4185/**
4186 * Group 12 jump table for register variant.
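 *
 * Indexed by the ModR/M reg (/r) value times four plus the SIMD prefix index
 * (none, 0x66, 0xf3, 0xf2) - see iemOp_Grp12 below.  E.g. 0x66 0x0f 0x71 /2 ib
 * (psrlw xmm, imm8) dispatches to entry 2*4 + 1.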
4187 */
4188IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4189{
4190 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4191 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4192 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4193 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4194 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4195 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4196 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4197 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4198};
4199AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4200
4201
4202/** Opcode 0x0f 0x71. */
4203FNIEMOP_DEF(iemOp_Grp12)
4204{
4205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4207 /* register, register */
4208 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4209 + pVCpu->iem.s.idxPrefix], bRm);
4210 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4211}
4212
4213
4214/** Opcode 0x0f 0x72 11/2. */
4215FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4216
4217/** Opcode 0x66 0x0f 0x72 11/2. */
4218FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4219
4220/** Opcode 0x0f 0x72 11/4. */
4221FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4222
4223/** Opcode 0x66 0x0f 0x72 11/4. */
4224FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4225
4226/** Opcode 0x0f 0x72 11/6. */
4227FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4228
4229/** Opcode 0x66 0x0f 0x72 11/6. */
4230FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4231
4232
4233/**
4234 * Group 13 jump table for register variant.
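 *
 * Same layout as the group 12 table above: four SIMD prefix columns per /r
 * value.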
4235 */
4236IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4237{
4238 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4239 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4240 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4241 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4242 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4243 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4244 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4245 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4246};
4247AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4248
4249/** Opcode 0x0f 0x72. */
4250FNIEMOP_DEF(iemOp_Grp13)
4251{
4252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4254 /* register, register */
4255 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4256 + pVCpu->iem.s.idxPrefix], bRm);
4257 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4258}
4259
4260
4261/** Opcode 0x0f 0x73 11/2. */
4262FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4263
4264/** Opcode 0x66 0x0f 0x73 11/2. */
4265FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4266
4267/** Opcode 0x66 0x0f 0x73 11/3. */
4268FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4269
4270/** Opcode 0x0f 0x73 11/6. */
4271FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4272
4273/** Opcode 0x66 0x0f 0x73 11/6. */
4274FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4275
4276/** Opcode 0x66 0x0f 0x73 11/7. */
4277FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4278
4279/**
4280 * Group 14 jump table for register variant.
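 *
 * Laid out like the group 12 table: four SIMD prefix columns per /r value.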
4281 */
4282IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4283{
4284 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4285 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4286 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4287 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4288 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4289 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4290 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4291 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4292};
4293AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4294
4295
4296/** Opcode 0x0f 0x73. */
4297FNIEMOP_DEF(iemOp_Grp14)
4298{
4299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4301 /* register, register */
4302 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4303 + pVCpu->iem.s.idxPrefix], bRm);
4304 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4305}
4306
4307
4308/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4309FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4310{
4311 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4312 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4313}
4314
4315/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4316FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4317{
4318 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4319 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4320}
4321
4322/* Opcode 0xf3 0x0f 0x74 - invalid */
4323/* Opcode 0xf2 0x0f 0x74 - invalid */
4324
4325
4326/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4327FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4328{
4329 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4330 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4331}
4332
4333/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4334FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4335{
4336 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4337 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4338}
4339
4340/* Opcode 0xf3 0x0f 0x75 - invalid */
4341/* Opcode 0xf2 0x0f 0x75 - invalid */
4342
4343
4344/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4345FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4346{
4347 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4348 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4349}
4350
4351/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4352FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4353{
4354 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4355 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4356}
4357
4358/* Opcode 0xf3 0x0f 0x76 - invalid */
4359/* Opcode 0xf2 0x0f 0x76 - invalid */
4360
4361
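/* The architectural effect of emms is to mark all eight x87 tag word entries
   as empty again; IEM_MC_FPU_FROM_MMX_MODE below updates the emulated FPU
   state accordingly. */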
4362/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4363FNIEMOP_DEF(iemOp_emms)
4364{
4365 IEMOP_MNEMONIC(emms, "emms");
4366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4367
4368 IEM_MC_BEGIN(0,0);
4369 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4370 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4371 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4372 IEM_MC_FPU_FROM_MMX_MODE();
4373 IEM_MC_ADVANCE_RIP();
4374 IEM_MC_END();
4375 return VINF_SUCCESS;
4376}
4377
4378/* Opcode 0x66 0x0f 0x77 - invalid */
4379/* Opcode 0xf3 0x0f 0x77 - invalid */
4380/* Opcode 0xf2 0x0f 0x77 - invalid */
4381
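/* vmread copies the VMCS field selected by the Gy register operand into the
   Ey destination (register or memory); the heavy lifting is done by the
   iemCImpl_vmread_* workers called below. */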
4382/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4383#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4384FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4385{
4386 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4387 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4388 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
4389 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4390
4391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4393 {
4394 /*
4395 * Register, register.
4396 */
4397 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4398 if (enmEffOpSize == IEMMODE_64BIT)
4399 {
4400 IEM_MC_BEGIN(2, 0);
4401 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4402 IEM_MC_ARG(uint64_t, u64Enc, 1);
4403 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4404 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4405 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
4406 IEM_MC_END();
4407 }
4408 else
4409 {
4410 IEM_MC_BEGIN(2, 0);
4411 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4412 IEM_MC_ARG(uint32_t, u32Enc, 1);
4413 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4414 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4415 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
4416 IEM_MC_END();
4417 }
4418 }
4419 else
4420 {
4421 /*
4422 * Memory, register.
4423 */
4424 if (enmEffOpSize == IEMMODE_64BIT)
4425 {
4426 IEM_MC_BEGIN(3, 0);
4427 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4428 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4429 IEM_MC_ARG(uint64_t, u64Enc, 2);
4430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4431 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4432 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4433 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4434 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
4435 IEM_MC_END();
4436 }
4437 else
4438 {
4439 IEM_MC_BEGIN(3, 0);
4440 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4441 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4442 IEM_MC_ARG(uint32_t, u32Enc, 2);
4443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4444 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4445 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4446 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4447 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
4448 IEM_MC_END();
4449 }
4450 }
4451 return VINF_SUCCESS;
4452}
4453#else
4454FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4455#endif
4456
4457/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4458FNIEMOP_STUB(iemOp_AmdGrp17);
4459/* Opcode 0xf3 0x0f 0x78 - invalid */
4460/* Opcode 0xf2 0x0f 0x78 - invalid */
4461
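/* vmwrite is the converse of vmread: the Ey value (register or memory) is
   written to the VMCS field selected by Gy. */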
4462/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4463#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4464FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4465{
4466 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4467 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4468 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4469 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4470
4471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4473 {
4474 /*
4475 * Register, register.
4476 */
4477 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4478 if (enmEffOpSize == IEMMODE_64BIT)
4479 {
4480 IEM_MC_BEGIN(2, 0);
4481 IEM_MC_ARG(uint64_t, u64Val, 0);
4482 IEM_MC_ARG(uint64_t, u64Enc, 1);
4483 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4484 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4485 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4486 IEM_MC_END();
4487 }
4488 else
4489 {
4490 IEM_MC_BEGIN(2, 0);
4491 IEM_MC_ARG(uint32_t, u32Val, 0);
4492 IEM_MC_ARG(uint32_t, u32Enc, 1);
4493 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4494 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4495 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4496 IEM_MC_END();
4497 }
4498 }
4499 else
4500 {
4501 /*
4502 * Register, memory.
4503 */
4504 if (enmEffOpSize == IEMMODE_64BIT)
4505 {
4506 IEM_MC_BEGIN(3, 0);
4507 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4508 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4509 IEM_MC_ARG(uint64_t, u64Enc, 2);
4510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4511 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4512 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4513 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4514 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
4515 IEM_MC_END();
4516 }
4517 else
4518 {
4519 IEM_MC_BEGIN(3, 0);
4520 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4521 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4522 IEM_MC_ARG(uint32_t, u32Enc, 2);
4523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4524 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4525 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4526 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4527 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
4528 IEM_MC_END();
4529 }
4530 }
4531 return VINF_SUCCESS;
4532}
4533#else
4534FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4535#endif
4536/* Opcode 0x66 0x0f 0x79 - invalid */
4537/* Opcode 0xf3 0x0f 0x79 - invalid */
4538/* Opcode 0xf2 0x0f 0x79 - invalid */
4539
4540/* Opcode 0x0f 0x7a - invalid */
4541/* Opcode 0x66 0x0f 0x7a - invalid */
4542/* Opcode 0xf3 0x0f 0x7a - invalid */
4543/* Opcode 0xf2 0x0f 0x7a - invalid */
4544
4545/* Opcode 0x0f 0x7b - invalid */
4546/* Opcode 0x66 0x0f 0x7b - invalid */
4547/* Opcode 0xf3 0x0f 0x7b - invalid */
4548/* Opcode 0xf2 0x0f 0x7b - invalid */
4549
4550/* Opcode 0x0f 0x7c - invalid */
4551/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4552FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4553/* Opcode 0xf3 0x0f 0x7c - invalid */
4554/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4555FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4556
4557/* Opcode 0x0f 0x7d - invalid */
4558/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4559FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4560/* Opcode 0xf3 0x0f 0x7d - invalid */
4561/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4562FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4563
4564
4565/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4566FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4567{
4568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4569 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4570 {
4571 /**
4572 * @opcode 0x7e
4573 * @opcodesub rex.w=1
4574 * @oppfx none
4575 * @opcpuid mmx
4576 * @opgroup og_mmx_datamove
4577 * @opxcpttype 5
4578 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4579 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4580 */
4581 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4583 {
4584 /* greg64, MMX */
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586 IEM_MC_BEGIN(0, 1);
4587 IEM_MC_LOCAL(uint64_t, u64Tmp);
4588
4589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4590 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4591
4592 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4593 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4594 IEM_MC_FPU_TO_MMX_MODE();
4595
4596 IEM_MC_ADVANCE_RIP();
4597 IEM_MC_END();
4598 }
4599 else
4600 {
4601 /* [mem64], MMX */
4602 IEM_MC_BEGIN(0, 2);
4603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4604 IEM_MC_LOCAL(uint64_t, u64Tmp);
4605
4606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4609 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4610
4611 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4612 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4613 IEM_MC_FPU_TO_MMX_MODE();
4614
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 }
4618 }
4619 else
4620 {
4621 /**
4622 * @opdone
4623 * @opcode 0x7e
4624 * @opcodesub rex.w=0
4625 * @oppfx none
4626 * @opcpuid mmx
4627 * @opgroup og_mmx_datamove
4628 * @opxcpttype 5
4629 * @opfunction iemOp_movd_q_Ey_Pd
4630 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4631 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4632 */
4633 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4635 {
4636 /* greg32, MMX */
4637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4638 IEM_MC_BEGIN(0, 1);
4639 IEM_MC_LOCAL(uint32_t, u32Tmp);
4640
4641 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4642 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4643
4644 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4645 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4646 IEM_MC_FPU_TO_MMX_MODE();
4647
4648 IEM_MC_ADVANCE_RIP();
4649 IEM_MC_END();
4650 }
4651 else
4652 {
4653 /* [mem32], MMX */
4654 IEM_MC_BEGIN(0, 2);
4655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4656 IEM_MC_LOCAL(uint32_t, u32Tmp);
4657
4658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4661 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4662
4663 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4664 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4665 IEM_MC_FPU_TO_MMX_MODE();
4666
4667 IEM_MC_ADVANCE_RIP();
4668 IEM_MC_END();
4669 }
4670 }
4671 return VINF_SUCCESS;
4673}
4674
4675
4676FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4677{
4678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4679 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4680 {
4681 /**
4682 * @opcode 0x7e
4683 * @opcodesub rex.w=1
4684 * @oppfx 0x66
4685 * @opcpuid sse2
4686 * @opgroup og_sse2_simdint_datamove
4687 * @opxcpttype 5
4688 * @optest 64-bit / op1=1 op2=2 -> op1=2
4689 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4690 */
4691 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4693 {
4694 /* greg64, XMM */
4695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4696 IEM_MC_BEGIN(0, 1);
4697 IEM_MC_LOCAL(uint64_t, u64Tmp);
4698
4699 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4700 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4701
4702 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4703 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4704
4705 IEM_MC_ADVANCE_RIP();
4706 IEM_MC_END();
4707 }
4708 else
4709 {
4710 /* [mem64], XMM */
4711 IEM_MC_BEGIN(0, 2);
4712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4713 IEM_MC_LOCAL(uint64_t, u64Tmp);
4714
4715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4719
4720 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4722
4723 IEM_MC_ADVANCE_RIP();
4724 IEM_MC_END();
4725 }
4726 }
4727 else
4728 {
4729 /**
4730 * @opdone
4731 * @opcode 0x7e
4732 * @opcodesub rex.w=0
4733 * @oppfx 0x66
4734 * @opcpuid sse2
4735 * @opgroup og_sse2_simdint_datamove
4736 * @opxcpttype 5
4737 * @opfunction iemOp_movd_q_Ey_Vy
4738 * @optest op1=1 op2=2 -> op1=2
4739 * @optest op1=0 op2=-42 -> op1=-42
4740 */
4741 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4743 {
4744 /* greg32, XMM */
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(uint32_t, u32Tmp);
4748
4749 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4750 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4751
4752 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4753 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4754
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 else
4759 {
4760 /* [mem32], XMM */
4761 IEM_MC_BEGIN(0, 2);
4762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4763 IEM_MC_LOCAL(uint32_t, u32Tmp);
4764
4765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4768 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4769
4770 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4771 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4772
4773 IEM_MC_ADVANCE_RIP();
4774 IEM_MC_END();
4775 }
4776 }
4777 return VINF_SUCCESS;
4779}
4780
4781/**
4782 * @opcode 0x7e
4783 * @oppfx 0xf3
4784 * @opcpuid sse2
4785 * @opgroup og_sse2_pcksclr_datamove
4786 * @opxcpttype none
4787 * @optest op1=1 op2=2 -> op1=2
4788 * @optest op1=0 op2=-42 -> op1=-42
4789 */
4790FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4791{
4792 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4794 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4795 {
4796 /*
4797 * Register, register.
4798 */
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800 IEM_MC_BEGIN(0, 2);
4801 IEM_MC_LOCAL(uint64_t, uSrc);
4802
4803 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4804 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4805
4806 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4807 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4808
4809 IEM_MC_ADVANCE_RIP();
4810 IEM_MC_END();
4811 }
4812 else
4813 {
4814 /*
4815 * Memory, register.
4816 */
4817 IEM_MC_BEGIN(0, 2);
4818 IEM_MC_LOCAL(uint64_t, uSrc);
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4820
4821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4824 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4825
4826 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4827 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4828
4829 IEM_MC_ADVANCE_RIP();
4830 IEM_MC_END();
4831 }
4832 return VINF_SUCCESS;
4833}
4834
4835/* Opcode 0xf2 0x0f 0x7e - invalid */
4836
4837
4838/** Opcode 0x0f 0x7f - movq Qq, Pq */
4839FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4840{
4841 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4844 {
4845 /*
4846 * Register, register.
4847 */
4848 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4849 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851 IEM_MC_BEGIN(0, 1);
4852 IEM_MC_LOCAL(uint64_t, u64Tmp);
4853 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4854 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4855 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4856 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4857 IEM_MC_ADVANCE_RIP();
4858 IEM_MC_END();
4859 }
4860 else
4861 {
4862 /*
4863 * Register, memory.
4864 */
4865 IEM_MC_BEGIN(0, 2);
4866 IEM_MC_LOCAL(uint64_t, u64Tmp);
4867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4868
4869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4872 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4873
4874 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4875 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4876
4877 IEM_MC_ADVANCE_RIP();
4878 IEM_MC_END();
4879 }
4880 return VINF_SUCCESS;
4881}
4882
4883/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4884FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4885{
4886 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4889 {
4890 /*
4891 * Register, register.
4892 */
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_BEGIN(0, 0);
4895 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4896 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4897 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4898 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4899 IEM_MC_ADVANCE_RIP();
4900 IEM_MC_END();
4901 }
4902 else
4903 {
4904 /*
4905 * Register, memory.
4906 */
4907 IEM_MC_BEGIN(0, 2);
4908 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4910
4911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4915
4916 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4917 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4918
4919 IEM_MC_ADVANCE_RIP();
4920 IEM_MC_END();
4921 }
4922 return VINF_SUCCESS;
4923}
4924
4925/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4926FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4927{
4928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4929 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4931 {
4932 /*
4933 * Register, register.
4934 */
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936 IEM_MC_BEGIN(0, 0);
4937 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4939 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4940 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4941 IEM_MC_ADVANCE_RIP();
4942 IEM_MC_END();
4943 }
4944 else
4945 {
4946 /*
4947 * Register, memory.
4948 */
4949 IEM_MC_BEGIN(0, 2);
4950 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4952
4953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4957
4958 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4959 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4960
4961 IEM_MC_ADVANCE_RIP();
4962 IEM_MC_END();
4963 }
4964 return VINF_SUCCESS;
4965}
4966
4967/* Opcode 0xf2 0x0f 0x7f - invalid */
4968
4969
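/*
 * 0x0f 0x80..0x8f - the long forms of Jcc Jv.  Each decoder below fetches a
 * signed 16-bit or 32-bit displacement according to the effective operand
 * size and branches relative to the next instruction when the EFLAGS
 * condition holds, otherwise it just advances RIP.
 */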
4970
4971/** Opcode 0x0f 0x80. */
4972FNIEMOP_DEF(iemOp_jo_Jv)
4973{
4974 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4975 IEMOP_HLP_MIN_386();
4976 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4977 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4978 {
4979 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4981
4982 IEM_MC_BEGIN(0, 0);
4983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4984 IEM_MC_REL_JMP_S16(i16Imm);
4985 } IEM_MC_ELSE() {
4986 IEM_MC_ADVANCE_RIP();
4987 } IEM_MC_ENDIF();
4988 IEM_MC_END();
4989 }
4990 else
4991 {
4992 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4994
4995 IEM_MC_BEGIN(0, 0);
4996 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4997 IEM_MC_REL_JMP_S32(i32Imm);
4998 } IEM_MC_ELSE() {
4999 IEM_MC_ADVANCE_RIP();
5000 } IEM_MC_ENDIF();
5001 IEM_MC_END();
5002 }
5003 return VINF_SUCCESS;
5004}
5005
5006
5007/** Opcode 0x0f 0x81. */
5008FNIEMOP_DEF(iemOp_jno_Jv)
5009{
5010 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5011 IEMOP_HLP_MIN_386();
5012 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5013 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5014 {
5015 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5017
5018 IEM_MC_BEGIN(0, 0);
5019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5020 IEM_MC_ADVANCE_RIP();
5021 } IEM_MC_ELSE() {
5022 IEM_MC_REL_JMP_S16(i16Imm);
5023 } IEM_MC_ENDIF();
5024 IEM_MC_END();
5025 }
5026 else
5027 {
5028 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5030
5031 IEM_MC_BEGIN(0, 0);
5032 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5033 IEM_MC_ADVANCE_RIP();
5034 } IEM_MC_ELSE() {
5035 IEM_MC_REL_JMP_S32(i32Imm);
5036 } IEM_MC_ENDIF();
5037 IEM_MC_END();
5038 }
5039 return VINF_SUCCESS;
5040}
5041
5042
5043/** Opcode 0x0f 0x82. */
5044FNIEMOP_DEF(iemOp_jc_Jv)
5045{
5046 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5047 IEMOP_HLP_MIN_386();
5048 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5049 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5050 {
5051 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5053
5054 IEM_MC_BEGIN(0, 0);
5055 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5056 IEM_MC_REL_JMP_S16(i16Imm);
5057 } IEM_MC_ELSE() {
5058 IEM_MC_ADVANCE_RIP();
5059 } IEM_MC_ENDIF();
5060 IEM_MC_END();
5061 }
5062 else
5063 {
5064 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5066
5067 IEM_MC_BEGIN(0, 0);
5068 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5069 IEM_MC_REL_JMP_S32(i32Imm);
5070 } IEM_MC_ELSE() {
5071 IEM_MC_ADVANCE_RIP();
5072 } IEM_MC_ENDIF();
5073 IEM_MC_END();
5074 }
5075 return VINF_SUCCESS;
5076}
5077
5078
5079/** Opcode 0x0f 0x83. */
5080FNIEMOP_DEF(iemOp_jnc_Jv)
5081{
5082 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5083 IEMOP_HLP_MIN_386();
5084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5085 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5086 {
5087 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5089
5090 IEM_MC_BEGIN(0, 0);
5091 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5092 IEM_MC_ADVANCE_RIP();
5093 } IEM_MC_ELSE() {
5094 IEM_MC_REL_JMP_S16(i16Imm);
5095 } IEM_MC_ENDIF();
5096 IEM_MC_END();
5097 }
5098 else
5099 {
5100 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5102
5103 IEM_MC_BEGIN(0, 0);
5104 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5105 IEM_MC_ADVANCE_RIP();
5106 } IEM_MC_ELSE() {
5107 IEM_MC_REL_JMP_S32(i32Imm);
5108 } IEM_MC_ENDIF();
5109 IEM_MC_END();
5110 }
5111 return VINF_SUCCESS;
5112}
5113
5114
5115/** Opcode 0x0f 0x84. */
5116FNIEMOP_DEF(iemOp_je_Jv)
5117{
5118 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5119 IEMOP_HLP_MIN_386();
5120 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5121 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5122 {
5123 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5125
5126 IEM_MC_BEGIN(0, 0);
5127 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5128 IEM_MC_REL_JMP_S16(i16Imm);
5129 } IEM_MC_ELSE() {
5130 IEM_MC_ADVANCE_RIP();
5131 } IEM_MC_ENDIF();
5132 IEM_MC_END();
5133 }
5134 else
5135 {
5136 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5138
5139 IEM_MC_BEGIN(0, 0);
5140 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5141 IEM_MC_REL_JMP_S32(i32Imm);
5142 } IEM_MC_ELSE() {
5143 IEM_MC_ADVANCE_RIP();
5144 } IEM_MC_ENDIF();
5145 IEM_MC_END();
5146 }
5147 return VINF_SUCCESS;
5148}
5149
5150
5151/** Opcode 0x0f 0x85. */
5152FNIEMOP_DEF(iemOp_jne_Jv)
5153{
5154 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5155 IEMOP_HLP_MIN_386();
5156 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5157 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5158 {
5159 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161
5162 IEM_MC_BEGIN(0, 0);
5163 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5164 IEM_MC_ADVANCE_RIP();
5165 } IEM_MC_ELSE() {
5166 IEM_MC_REL_JMP_S16(i16Imm);
5167 } IEM_MC_ENDIF();
5168 IEM_MC_END();
5169 }
5170 else
5171 {
5172 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174
5175 IEM_MC_BEGIN(0, 0);
5176 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5177 IEM_MC_ADVANCE_RIP();
5178 } IEM_MC_ELSE() {
5179 IEM_MC_REL_JMP_S32(i32Imm);
5180 } IEM_MC_ENDIF();
5181 IEM_MC_END();
5182 }
5183 return VINF_SUCCESS;
5184}
5185
5186
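/* jbe/jna is taken when CF or ZF is set, i.e. unsigned 'below or equal';
   jnbe/ja (0x87) is the inverted test. */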
5187/** Opcode 0x0f 0x86. */
5188FNIEMOP_DEF(iemOp_jbe_Jv)
5189{
5190 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5191 IEMOP_HLP_MIN_386();
5192 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5193 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5194 {
5195 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5197
5198 IEM_MC_BEGIN(0, 0);
5199 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5200 IEM_MC_REL_JMP_S16(i16Imm);
5201 } IEM_MC_ELSE() {
5202 IEM_MC_ADVANCE_RIP();
5203 } IEM_MC_ENDIF();
5204 IEM_MC_END();
5205 }
5206 else
5207 {
5208 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5210
5211 IEM_MC_BEGIN(0, 0);
5212 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5213 IEM_MC_REL_JMP_S32(i32Imm);
5214 } IEM_MC_ELSE() {
5215 IEM_MC_ADVANCE_RIP();
5216 } IEM_MC_ENDIF();
5217 IEM_MC_END();
5218 }
5219 return VINF_SUCCESS;
5220}
5221
5222
5223/** Opcode 0x0f 0x87. */
5224FNIEMOP_DEF(iemOp_jnbe_Jv)
5225{
5226 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5227 IEMOP_HLP_MIN_386();
5228 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5229 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5230 {
5231 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5233
5234 IEM_MC_BEGIN(0, 0);
5235 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5236 IEM_MC_ADVANCE_RIP();
5237 } IEM_MC_ELSE() {
5238 IEM_MC_REL_JMP_S16(i16Imm);
5239 } IEM_MC_ENDIF();
5240 IEM_MC_END();
5241 }
5242 else
5243 {
5244 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5246
5247 IEM_MC_BEGIN(0, 0);
5248 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5249 IEM_MC_ADVANCE_RIP();
5250 } IEM_MC_ELSE() {
5251 IEM_MC_REL_JMP_S32(i32Imm);
5252 } IEM_MC_ENDIF();
5253 IEM_MC_END();
5254 }
5255 return VINF_SUCCESS;
5256}
5257
5258
5259/** Opcode 0x0f 0x88. */
5260FNIEMOP_DEF(iemOp_js_Jv)
5261{
5262 IEMOP_MNEMONIC(js_Jv, "js Jv");
5263 IEMOP_HLP_MIN_386();
5264 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5265 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5266 {
5267 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269
5270 IEM_MC_BEGIN(0, 0);
5271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5272 IEM_MC_REL_JMP_S16(i16Imm);
5273 } IEM_MC_ELSE() {
5274 IEM_MC_ADVANCE_RIP();
5275 } IEM_MC_ENDIF();
5276 IEM_MC_END();
5277 }
5278 else
5279 {
5280 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5282
5283 IEM_MC_BEGIN(0, 0);
5284 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5285 IEM_MC_REL_JMP_S32(i32Imm);
5286 } IEM_MC_ELSE() {
5287 IEM_MC_ADVANCE_RIP();
5288 } IEM_MC_ENDIF();
5289 IEM_MC_END();
5290 }
5291 return VINF_SUCCESS;
5292}
5293
5294
5295/** Opcode 0x0f 0x89. */
5296FNIEMOP_DEF(iemOp_jns_Jv)
5297{
5298 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5299 IEMOP_HLP_MIN_386();
5300 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5301 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5302 {
5303 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5305
5306 IEM_MC_BEGIN(0, 0);
5307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5308 IEM_MC_ADVANCE_RIP();
5309 } IEM_MC_ELSE() {
5310 IEM_MC_REL_JMP_S16(i16Imm);
5311 } IEM_MC_ENDIF();
5312 IEM_MC_END();
5313 }
5314 else
5315 {
5316 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5318
5319 IEM_MC_BEGIN(0, 0);
5320 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5321 IEM_MC_ADVANCE_RIP();
5322 } IEM_MC_ELSE() {
5323 IEM_MC_REL_JMP_S32(i32Imm);
5324 } IEM_MC_ENDIF();
5325 IEM_MC_END();
5326 }
5327 return VINF_SUCCESS;
5328}
5329
5330
5331/** Opcode 0x0f 0x8a. */
5332FNIEMOP_DEF(iemOp_jp_Jv)
5333{
5334 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5335 IEMOP_HLP_MIN_386();
5336 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5337 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5338 {
5339 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5341
5342 IEM_MC_BEGIN(0, 0);
5343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5344 IEM_MC_REL_JMP_S16(i16Imm);
5345 } IEM_MC_ELSE() {
5346 IEM_MC_ADVANCE_RIP();
5347 } IEM_MC_ENDIF();
5348 IEM_MC_END();
5349 }
5350 else
5351 {
5352 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5354
5355 IEM_MC_BEGIN(0, 0);
5356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5357 IEM_MC_REL_JMP_S32(i32Imm);
5358 } IEM_MC_ELSE() {
5359 IEM_MC_ADVANCE_RIP();
5360 } IEM_MC_ENDIF();
5361 IEM_MC_END();
5362 }
5363 return VINF_SUCCESS;
5364}
5365
5366
5367/** Opcode 0x0f 0x8b. */
5368FNIEMOP_DEF(iemOp_jnp_Jv)
5369{
5370 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5371 IEMOP_HLP_MIN_386();
5372 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5373 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5374 {
5375 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5377
5378 IEM_MC_BEGIN(0, 0);
5379 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5380 IEM_MC_ADVANCE_RIP();
5381 } IEM_MC_ELSE() {
5382 IEM_MC_REL_JMP_S16(i16Imm);
5383 } IEM_MC_ENDIF();
5384 IEM_MC_END();
5385 }
5386 else
5387 {
5388 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5390
5391 IEM_MC_BEGIN(0, 0);
5392 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5393 IEM_MC_ADVANCE_RIP();
5394 } IEM_MC_ELSE() {
5395 IEM_MC_REL_JMP_S32(i32Imm);
5396 } IEM_MC_ENDIF();
5397 IEM_MC_END();
5398 }
5399 return VINF_SUCCESS;
5400}
5401
5402
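/* jl/jnge is taken when SF != OF, i.e. signed 'less than' after a cmp. */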
5403/** Opcode 0x0f 0x8c. */
5404FNIEMOP_DEF(iemOp_jl_Jv)
5405{
5406 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5407 IEMOP_HLP_MIN_386();
5408 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5409 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5410 {
5411 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413
5414 IEM_MC_BEGIN(0, 0);
5415 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5416 IEM_MC_REL_JMP_S16(i16Imm);
5417 } IEM_MC_ELSE() {
5418 IEM_MC_ADVANCE_RIP();
5419 } IEM_MC_ENDIF();
5420 IEM_MC_END();
5421 }
5422 else
5423 {
5424 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5426
5427 IEM_MC_BEGIN(0, 0);
5428 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5429 IEM_MC_REL_JMP_S32(i32Imm);
5430 } IEM_MC_ELSE() {
5431 IEM_MC_ADVANCE_RIP();
5432 } IEM_MC_ENDIF();
5433 IEM_MC_END();
5434 }
5435 return VINF_SUCCESS;
5436}
5437
5438
5439/** Opcode 0x0f 0x8d. */
5440FNIEMOP_DEF(iemOp_jnl_Jv)
5441{
5442 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5443 IEMOP_HLP_MIN_386();
5444 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5445 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5446 {
5447 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5449
5450 IEM_MC_BEGIN(0, 0);
5451 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5452 IEM_MC_ADVANCE_RIP();
5453 } IEM_MC_ELSE() {
5454 IEM_MC_REL_JMP_S16(i16Imm);
5455 } IEM_MC_ENDIF();
5456 IEM_MC_END();
5457 }
5458 else
5459 {
5460 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5462
5463 IEM_MC_BEGIN(0, 0);
5464 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5465 IEM_MC_ADVANCE_RIP();
5466 } IEM_MC_ELSE() {
5467 IEM_MC_REL_JMP_S32(i32Imm);
5468 } IEM_MC_ENDIF();
5469 IEM_MC_END();
5470 }
5471 return VINF_SUCCESS;
5472}
5473
5474
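/* jle/jng is taken when ZF is set or SF != OF, i.e. signed 'less or equal'. */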
5475/** Opcode 0x0f 0x8e. */
5476FNIEMOP_DEF(iemOp_jle_Jv)
5477{
5478 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5479 IEMOP_HLP_MIN_386();
5480 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5481 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5482 {
5483 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485
5486 IEM_MC_BEGIN(0, 0);
5487 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5488 IEM_MC_REL_JMP_S16(i16Imm);
5489 } IEM_MC_ELSE() {
5490 IEM_MC_ADVANCE_RIP();
5491 } IEM_MC_ENDIF();
5492 IEM_MC_END();
5493 }
5494 else
5495 {
5496 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5498
5499 IEM_MC_BEGIN(0, 0);
5500 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5501 IEM_MC_REL_JMP_S32(i32Imm);
5502 } IEM_MC_ELSE() {
5503 IEM_MC_ADVANCE_RIP();
5504 } IEM_MC_ENDIF();
5505 IEM_MC_END();
5506 }
5507 return VINF_SUCCESS;
5508}
5509
5510
5511/** Opcode 0x0f 0x8f. */
5512FNIEMOP_DEF(iemOp_jnle_Jv)
5513{
5514 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5515 IEMOP_HLP_MIN_386();
5516 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5517 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5518 {
5519 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5521
5522 IEM_MC_BEGIN(0, 0);
5523 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5524 IEM_MC_ADVANCE_RIP();
5525 } IEM_MC_ELSE() {
5526 IEM_MC_REL_JMP_S16(i16Imm);
5527 } IEM_MC_ENDIF();
5528 IEM_MC_END();
5529 }
5530 else
5531 {
5532 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5534
5535 IEM_MC_BEGIN(0, 0);
5536 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5537 IEM_MC_ADVANCE_RIP();
5538 } IEM_MC_ELSE() {
5539 IEM_MC_REL_JMP_S32(i32Imm);
5540 } IEM_MC_ENDIF();
5541 IEM_MC_END();
5542 }
5543 return VINF_SUCCESS;
5544}
5545
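/*
 * The IEM_MC_IF_EFL_* conditions used by the signed Jcc forms above boil
 * down to the following plain C predicates (an illustrative sketch only,
 * outside the IEM_MC framework; the function names are made up):
 *
 *      bool iemExampleIsJlTaken(uint32_t fEfl)   // jl/jnge: SF != OF
 *      {
 *          return RT_BOOL(fEfl & X86_EFL_SF) != RT_BOOL(fEfl & X86_EFL_OF);
 *      }
 *
 *      bool iemExampleIsJleTaken(uint32_t fEfl)  // jle/jng: ZF, or SF != OF
 *      {
 *          return (fEfl & X86_EFL_ZF)
 *              || RT_BOOL(fEfl & X86_EFL_SF) != RT_BOOL(fEfl & X86_EFL_OF);
 *      }
 */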
5546
5547/** Opcode 0x0f 0x90. */
5548FNIEMOP_DEF(iemOp_seto_Eb)
5549{
5550 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5551 IEMOP_HLP_MIN_386();
5552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5553
5554 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5555 * any way. AMD says it's "unused", whatever that means. We're
5556 * ignoring it for now. */
5557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5558 {
5559 /* register target */
5560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5561 IEM_MC_BEGIN(0, 0);
5562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5563 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5564 } IEM_MC_ELSE() {
5565 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5566 } IEM_MC_ENDIF();
5567 IEM_MC_ADVANCE_RIP();
5568 IEM_MC_END();
5569 }
5570 else
5571 {
5572 /* memory target */
5573 IEM_MC_BEGIN(0, 1);
5574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5577 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5578 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5579 } IEM_MC_ELSE() {
5580 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5581 } IEM_MC_ENDIF();
5582 IEM_MC_ADVANCE_RIP();
5583 IEM_MC_END();
5584 }
5585 return VINF_SUCCESS;
5586}
5587
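/*
 * All sixteen SETcc encodings (0x0f 0x90 thru 0x0f 0x9f) follow the shape
 * above: decode ModR/M, evaluate a single EFLAGS predicate, and store one
 * byte, 1 or 0, to the register or memory destination.  As a plain C
 * sketch (illustrative only, the name is made up):
 *
 *      uint8_t iemExampleSetcc(bool fCondition)
 *      {
 *          return fCondition ? 1 : 0; // always a plain byte store, never RMW
 *      }
 */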
5588
5589/** Opcode 0x0f 0x91. */
5590FNIEMOP_DEF(iemOp_setno_Eb)
5591{
5592 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5593 IEMOP_HLP_MIN_386();
5594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5595
5596 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5597 * any way. AMD says it's "unused", whatever that means. We're
5598 * ignoring it for now. */
5599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5600 {
5601 /* register target */
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603 IEM_MC_BEGIN(0, 0);
5604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5605 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5606 } IEM_MC_ELSE() {
5607 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5608 } IEM_MC_ENDIF();
5609 IEM_MC_ADVANCE_RIP();
5610 IEM_MC_END();
5611 }
5612 else
5613 {
5614 /* memory target */
5615 IEM_MC_BEGIN(0, 1);
5616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5620 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5621 } IEM_MC_ELSE() {
5622 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5623 } IEM_MC_ENDIF();
5624 IEM_MC_ADVANCE_RIP();
5625 IEM_MC_END();
5626 }
5627 return VINF_SUCCESS;
5628}
5629
5630
5631/** Opcode 0x0f 0x92. */
5632FNIEMOP_DEF(iemOp_setc_Eb)
5633{
5634 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5635 IEMOP_HLP_MIN_386();
5636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5637
5638 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5639 * any way. AMD says it's "unused", whatever that means. We're
5640 * ignoring it for now. */
5641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5642 {
5643 /* register target */
5644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5645 IEM_MC_BEGIN(0, 0);
5646 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5647 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5648 } IEM_MC_ELSE() {
5649 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5650 } IEM_MC_ENDIF();
5651 IEM_MC_ADVANCE_RIP();
5652 IEM_MC_END();
5653 }
5654 else
5655 {
5656 /* memory target */
5657 IEM_MC_BEGIN(0, 1);
5658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5661 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5662 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5663 } IEM_MC_ELSE() {
5664 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5665 } IEM_MC_ENDIF();
5666 IEM_MC_ADVANCE_RIP();
5667 IEM_MC_END();
5668 }
5669 return VINF_SUCCESS;
5670}
5671
5672
5673/** Opcode 0x0f 0x93. */
5674FNIEMOP_DEF(iemOp_setnc_Eb)
5675{
5676 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5677 IEMOP_HLP_MIN_386();
5678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5679
5680 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5681 * any way. AMD says it's "unused", whatever that means. We're
5682 * ignoring it for now. */
5683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5684 {
5685 /* register target */
5686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5687 IEM_MC_BEGIN(0, 0);
5688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5689 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5690 } IEM_MC_ELSE() {
5691 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5692 } IEM_MC_ENDIF();
5693 IEM_MC_ADVANCE_RIP();
5694 IEM_MC_END();
5695 }
5696 else
5697 {
5698 /* memory target */
5699 IEM_MC_BEGIN(0, 1);
5700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5704 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5705 } IEM_MC_ELSE() {
5706 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5707 } IEM_MC_ENDIF();
5708 IEM_MC_ADVANCE_RIP();
5709 IEM_MC_END();
5710 }
5711 return VINF_SUCCESS;
5712}
5713
5714
5715/** Opcode 0x0f 0x94. */
5716FNIEMOP_DEF(iemOp_sete_Eb)
5717{
5718 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5719 IEMOP_HLP_MIN_386();
5720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5721
5722 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5723 * any way. AMD says it's "unused", whatever that means. We're
5724 * ignoring it for now. */
5725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5726 {
5727 /* register target */
5728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5729 IEM_MC_BEGIN(0, 0);
5730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5731 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5732 } IEM_MC_ELSE() {
5733 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5734 } IEM_MC_ENDIF();
5735 IEM_MC_ADVANCE_RIP();
5736 IEM_MC_END();
5737 }
5738 else
5739 {
5740 /* memory target */
5741 IEM_MC_BEGIN(0, 1);
5742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5746 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5747 } IEM_MC_ELSE() {
5748 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5749 } IEM_MC_ENDIF();
5750 IEM_MC_ADVANCE_RIP();
5751 IEM_MC_END();
5752 }
5753 return VINF_SUCCESS;
5754}
5755
5756
5757/** Opcode 0x0f 0x95. */
5758FNIEMOP_DEF(iemOp_setne_Eb)
5759{
5760 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5761 IEMOP_HLP_MIN_386();
5762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5763
5764 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5765 * any way. AMD says it's "unused", whatever that means. We're
5766 * ignoring it for now. */
5767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5768 {
5769 /* register target */
5770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5771 IEM_MC_BEGIN(0, 0);
5772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5773 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5774 } IEM_MC_ELSE() {
5775 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5776 } IEM_MC_ENDIF();
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 }
5780 else
5781 {
5782 /* memory target */
5783 IEM_MC_BEGIN(0, 1);
5784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5787 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5788 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5789 } IEM_MC_ELSE() {
5790 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5791 } IEM_MC_ENDIF();
5792 IEM_MC_ADVANCE_RIP();
5793 IEM_MC_END();
5794 }
5795 return VINF_SUCCESS;
5796}
5797
5798
5799/** Opcode 0x0f 0x96. */
5800FNIEMOP_DEF(iemOp_setbe_Eb)
5801{
5802 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5803 IEMOP_HLP_MIN_386();
5804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5805
5806 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5807 * any way. AMD says it's "unused", whatever that means. We're
5808 * ignoring it for now. */
5809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5810 {
5811 /* register target */
5812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5813 IEM_MC_BEGIN(0, 0);
5814 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5815 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5816 } IEM_MC_ELSE() {
5817 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5818 } IEM_MC_ENDIF();
5819 IEM_MC_ADVANCE_RIP();
5820 IEM_MC_END();
5821 }
5822 else
5823 {
5824 /* memory target */
5825 IEM_MC_BEGIN(0, 1);
5826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5829 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5831 } IEM_MC_ELSE() {
5832 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5833 } IEM_MC_ENDIF();
5834 IEM_MC_ADVANCE_RIP();
5835 IEM_MC_END();
5836 }
5837 return VINF_SUCCESS;
5838}
5839
5840
5841/** Opcode 0x0f 0x97. */
5842FNIEMOP_DEF(iemOp_setnbe_Eb)
5843{
5844 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5845 IEMOP_HLP_MIN_386();
5846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5847
5848 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5849 * any way. AMD says it's "unused", whatever that means. We're
5850 * ignoring it for now. */
5851 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5852 {
5853 /* register target */
5854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5855 IEM_MC_BEGIN(0, 0);
5856 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5857 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5858 } IEM_MC_ELSE() {
5859 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5860 } IEM_MC_ENDIF();
5861 IEM_MC_ADVANCE_RIP();
5862 IEM_MC_END();
5863 }
5864 else
5865 {
5866 /* memory target */
5867 IEM_MC_BEGIN(0, 1);
5868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5871 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5872 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5873 } IEM_MC_ELSE() {
5874 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5875 } IEM_MC_ENDIF();
5876 IEM_MC_ADVANCE_RIP();
5877 IEM_MC_END();
5878 }
5879 return VINF_SUCCESS;
5880}
5881
5882
5883/** Opcode 0x0f 0x98. */
5884FNIEMOP_DEF(iemOp_sets_Eb)
5885{
5886 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5887 IEMOP_HLP_MIN_386();
5888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5889
5890 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5891 * any way. AMD says it's "unused", whatever that means. We're
5892 * ignoring it for now. */
5893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5894 {
5895 /* register target */
5896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5897 IEM_MC_BEGIN(0, 0);
5898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5899 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5900 } IEM_MC_ELSE() {
5901 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5902 } IEM_MC_ENDIF();
5903 IEM_MC_ADVANCE_RIP();
5904 IEM_MC_END();
5905 }
5906 else
5907 {
5908 /* memory target */
5909 IEM_MC_BEGIN(0, 1);
5910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5914 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5915 } IEM_MC_ELSE() {
5916 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5917 } IEM_MC_ENDIF();
5918 IEM_MC_ADVANCE_RIP();
5919 IEM_MC_END();
5920 }
5921 return VINF_SUCCESS;
5922}
5923
5924
5925/** Opcode 0x0f 0x99. */
5926FNIEMOP_DEF(iemOp_setns_Eb)
5927{
5928 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5929 IEMOP_HLP_MIN_386();
5930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5931
5932 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5933 * any way. AMD says it's "unused", whatever that means. We're
5934 * ignoring it for now. */
5935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5936 {
5937 /* register target */
5938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5939 IEM_MC_BEGIN(0, 0);
5940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5941 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5942 } IEM_MC_ELSE() {
5943 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5944 } IEM_MC_ENDIF();
5945 IEM_MC_ADVANCE_RIP();
5946 IEM_MC_END();
5947 }
5948 else
5949 {
5950 /* memory target */
5951 IEM_MC_BEGIN(0, 1);
5952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5955 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5956 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5957 } IEM_MC_ELSE() {
5958 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5959 } IEM_MC_ENDIF();
5960 IEM_MC_ADVANCE_RIP();
5961 IEM_MC_END();
5962 }
5963 return VINF_SUCCESS;
5964}
5965
5966
5967/** Opcode 0x0f 0x9a. */
5968FNIEMOP_DEF(iemOp_setp_Eb)
5969{
5970 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5971 IEMOP_HLP_MIN_386();
5972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5973
5974 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5975 * any way. AMD says it's "unused", whatever that means. We're
5976 * ignoring it for now. */
5977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5978 {
5979 /* register target */
5980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5981 IEM_MC_BEGIN(0, 0);
5982 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5983 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5984 } IEM_MC_ELSE() {
5985 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5986 } IEM_MC_ENDIF();
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 }
5990 else
5991 {
5992 /* memory target */
5993 IEM_MC_BEGIN(0, 1);
5994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5998 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5999 } IEM_MC_ELSE() {
6000 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6001 } IEM_MC_ENDIF();
6002 IEM_MC_ADVANCE_RIP();
6003 IEM_MC_END();
6004 }
6005 return VINF_SUCCESS;
6006}
6007
6008
6009/** Opcode 0x0f 0x9b. */
6010FNIEMOP_DEF(iemOp_setnp_Eb)
6011{
6012 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6013 IEMOP_HLP_MIN_386();
6014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6015
6016 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6017 * any way. AMD says it's "unused", whatever that means. We're
6018 * ignoring it for now. */
6019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6020 {
6021 /* register target */
6022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6023 IEM_MC_BEGIN(0, 0);
6024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6025 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6026 } IEM_MC_ELSE() {
6027 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6028 } IEM_MC_ENDIF();
6029 IEM_MC_ADVANCE_RIP();
6030 IEM_MC_END();
6031 }
6032 else
6033 {
6034 /* memory target */
6035 IEM_MC_BEGIN(0, 1);
6036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6039 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6040 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6041 } IEM_MC_ELSE() {
6042 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6043 } IEM_MC_ENDIF();
6044 IEM_MC_ADVANCE_RIP();
6045 IEM_MC_END();
6046 }
6047 return VINF_SUCCESS;
6048}
6049
6050
6051/** Opcode 0x0f 0x9c. */
6052FNIEMOP_DEF(iemOp_setl_Eb)
6053{
6054 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6055 IEMOP_HLP_MIN_386();
6056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6057
6058 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6059 * any way. AMD says it's "unused", whatever that means. We're
6060 * ignoring it for now. */
6061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6062 {
6063 /* register target */
6064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6065 IEM_MC_BEGIN(0, 0);
6066 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6067 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6068 } IEM_MC_ELSE() {
6069 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6070 } IEM_MC_ENDIF();
6071 IEM_MC_ADVANCE_RIP();
6072 IEM_MC_END();
6073 }
6074 else
6075 {
6076 /* memory target */
6077 IEM_MC_BEGIN(0, 1);
6078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6081 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6082 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6083 } IEM_MC_ELSE() {
6084 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6085 } IEM_MC_ENDIF();
6086 IEM_MC_ADVANCE_RIP();
6087 IEM_MC_END();
6088 }
6089 return VINF_SUCCESS;
6090}
6091
6092
6093/** Opcode 0x0f 0x9d. */
6094FNIEMOP_DEF(iemOp_setnl_Eb)
6095{
6096 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6097 IEMOP_HLP_MIN_386();
6098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6099
6100 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6101 * any way. AMD says it's "unused", whatever that means. We're
6102 * ignoring it for now. */
6103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6104 {
6105 /* register target */
6106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6107 IEM_MC_BEGIN(0, 0);
6108 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6109 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6110 } IEM_MC_ELSE() {
6111 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6112 } IEM_MC_ENDIF();
6113 IEM_MC_ADVANCE_RIP();
6114 IEM_MC_END();
6115 }
6116 else
6117 {
6118 /* memory target */
6119 IEM_MC_BEGIN(0, 1);
6120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6123 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6124 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6125 } IEM_MC_ELSE() {
6126 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6127 } IEM_MC_ENDIF();
6128 IEM_MC_ADVANCE_RIP();
6129 IEM_MC_END();
6130 }
6131 return VINF_SUCCESS;
6132}
6133
6134
6135/** Opcode 0x0f 0x9e. */
6136FNIEMOP_DEF(iemOp_setle_Eb)
6137{
6138 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6139 IEMOP_HLP_MIN_386();
6140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6141
6142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6143 * any way. AMD says it's "unused", whatever that means. We're
6144 * ignoring it for now. */
6145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6146 {
6147 /* register target */
6148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6149 IEM_MC_BEGIN(0, 0);
6150 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6151 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6152 } IEM_MC_ELSE() {
6153 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6154 } IEM_MC_ENDIF();
6155 IEM_MC_ADVANCE_RIP();
6156 IEM_MC_END();
6157 }
6158 else
6159 {
6160 /* memory target */
6161 IEM_MC_BEGIN(0, 1);
6162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6165 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6167 } IEM_MC_ELSE() {
6168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6169 } IEM_MC_ENDIF();
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 }
6173 return VINF_SUCCESS;
6174}
6175
6176
6177/** Opcode 0x0f 0x9f. */
6178FNIEMOP_DEF(iemOp_setnle_Eb)
6179{
6180 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6181 IEMOP_HLP_MIN_386();
6182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6183
6184 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6185 * any way. AMD says it's "unused", whatever that means. We're
6186 * ignoring it for now. */
6187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6188 {
6189 /* register target */
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6191 IEM_MC_BEGIN(0, 0);
6192 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6193 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6194 } IEM_MC_ELSE() {
6195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6196 } IEM_MC_ENDIF();
6197 IEM_MC_ADVANCE_RIP();
6198 IEM_MC_END();
6199 }
6200 else
6201 {
6202 /* memory target */
6203 IEM_MC_BEGIN(0, 1);
6204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6208 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6209 } IEM_MC_ELSE() {
6210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6211 } IEM_MC_ENDIF();
6212 IEM_MC_ADVANCE_RIP();
6213 IEM_MC_END();
6214 }
6215 return VINF_SUCCESS;
6216}
6217
6218
6219/**
6220 * Common 'push segment-register' helper.
6221 */
6222FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6223{
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only fs and gs can be pushed in 64-bit mode. */
6226 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6227
6228 switch (pVCpu->iem.s.enmEffOpSize)
6229 {
6230 case IEMMODE_16BIT:
6231 IEM_MC_BEGIN(0, 1);
6232 IEM_MC_LOCAL(uint16_t, u16Value);
6233 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6234 IEM_MC_PUSH_U16(u16Value);
6235 IEM_MC_ADVANCE_RIP();
6236 IEM_MC_END();
6237 break;
6238
6239 case IEMMODE_32BIT:
6240 IEM_MC_BEGIN(0, 1);
6241 IEM_MC_LOCAL(uint32_t, u32Value);
6242 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6243 IEM_MC_PUSH_U32_SREG(u32Value);
6244 IEM_MC_ADVANCE_RIP();
6245 IEM_MC_END();
6246 break;
6247
6248 case IEMMODE_64BIT:
6249 IEM_MC_BEGIN(0, 1);
6250 IEM_MC_LOCAL(uint64_t, u64Value);
6251 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6252 IEM_MC_PUSH_U64(u64Value);
6253 IEM_MC_ADVANCE_RIP();
6254 IEM_MC_END();
6255 break;
6256 }
6257
6258 return VINF_SUCCESS;
6259}
6260
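/*
 * The IEM_MC_PUSH_U32_SREG in the 32-bit case above (rather than a plain
 * IEM_MC_PUSH_U32) caters for a real CPU quirk: with a 32-bit operand size
 * some processors only write the low word of the stack slot when pushing a
 * segment register, leaving the high word untouched.  Rough sketch of that
 * behaviour, assuming a hypothetical flat stack mapping pbStack:
 *
 *      uEsp -= 4;                              // a full 32-bit slot, but...
 *      *(uint16_t *)&pbStack[uEsp] = uSel;     // ...only a 16-bit write.
 */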
6261
6262/** Opcode 0x0f 0xa0. */
6263FNIEMOP_DEF(iemOp_push_fs)
6264{
6265 IEMOP_MNEMONIC(push_fs, "push fs");
6266 IEMOP_HLP_MIN_386();
6267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6268 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6269}
6270
6271
6272/** Opcode 0x0f 0xa1. */
6273FNIEMOP_DEF(iemOp_pop_fs)
6274{
6275 IEMOP_MNEMONIC(pop_fs, "pop fs");
6276 IEMOP_HLP_MIN_386();
6277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6278 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6279}
6280
6281
6282/** Opcode 0x0f 0xa2. */
6283FNIEMOP_DEF(iemOp_cpuid)
6284{
6285 IEMOP_MNEMONIC(cpuid, "cpuid");
6286 IEMOP_HLP_MIN_486(); /* Not all 486s have CPUID. */
6287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6288 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6289}
6290
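/*
 * Background on the IEMOP_HLP_MIN_486 check above: CPUID is only present
 * where EFLAGS.ID (bit 21, X86_EFL_ID) can be toggled, which is how
 * pre-CPUID software probes for the instruction.  Illustrative sketch:
 *
 *      pushfd / pop eax                ; eax = EFLAGS
 *      xor   eax, X86_EFL_ID           ; flip the ID bit
 *      push  eax / popfd               ; try writing it back
 *      pushfd / pop edx                ; re-read EFLAGS
 *      ; CPUID is present iff the flip stuck, i.e. edx == eax.
 */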
6291
6292/**
6293 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6294 * iemOp_bts_Ev_Gv.
6295 */
6296FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6297{
6298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6299 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6300
6301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6302 {
6303 /* register destination. */
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 switch (pVCpu->iem.s.enmEffOpSize)
6306 {
6307 case IEMMODE_16BIT:
6308 IEM_MC_BEGIN(3, 0);
6309 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6310 IEM_MC_ARG(uint16_t, u16Src, 1);
6311 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6312
6313 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6314 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6315 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6316 IEM_MC_REF_EFLAGS(pEFlags);
6317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6318
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 case IEMMODE_32BIT:
6324 IEM_MC_BEGIN(3, 0);
6325 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6326 IEM_MC_ARG(uint32_t, u32Src, 1);
6327 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6328
6329 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6330 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6331 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6332 IEM_MC_REF_EFLAGS(pEFlags);
6333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6334
6335 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6336 IEM_MC_ADVANCE_RIP();
6337 IEM_MC_END();
6338 return VINF_SUCCESS;
6339
6340 case IEMMODE_64BIT:
6341 IEM_MC_BEGIN(3, 0);
6342 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6343 IEM_MC_ARG(uint64_t, u64Src, 1);
6344 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6345
6346 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6347 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6348 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6349 IEM_MC_REF_EFLAGS(pEFlags);
6350 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6351
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6357 }
6358 }
6359 else
6360 {
6361 /* memory destination. */
6362
6363 uint32_t fAccess;
6364 if (pImpl->pfnLockedU16)
6365 fAccess = IEM_ACCESS_DATA_RW;
6366 else /* BT */
6367 fAccess = IEM_ACCESS_DATA_R;
6368
6369 /** @todo test negative bit offsets! */
6370 switch (pVCpu->iem.s.enmEffOpSize)
6371 {
6372 case IEMMODE_16BIT:
6373 IEM_MC_BEGIN(3, 2);
6374 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6375 IEM_MC_ARG(uint16_t, u16Src, 1);
6376 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6378 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6379
6380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6381 if (pImpl->pfnLockedU16)
6382 IEMOP_HLP_DONE_DECODING();
6383 else
6384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6385 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6386 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6387 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6388 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6389 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6390 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6391 IEM_MC_FETCH_EFLAGS(EFlags);
6392
6393 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6394 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6395 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6396 else
6397 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6398 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6399
6400 IEM_MC_COMMIT_EFLAGS(EFlags);
6401 IEM_MC_ADVANCE_RIP();
6402 IEM_MC_END();
6403 return VINF_SUCCESS;
6404
6405 case IEMMODE_32BIT:
6406 IEM_MC_BEGIN(3, 2);
6407 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6408 IEM_MC_ARG(uint32_t, u32Src, 1);
6409 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6411 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6412
6413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6414 if (pImpl->pfnLockedU16)
6415 IEMOP_HLP_DONE_DECODING();
6416 else
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6418 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6419 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6420 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6421 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6422 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6423 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6424 IEM_MC_FETCH_EFLAGS(EFlags);
6425
6426 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6427 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6428 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6429 else
6430 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6431 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6432
6433 IEM_MC_COMMIT_EFLAGS(EFlags);
6434 IEM_MC_ADVANCE_RIP();
6435 IEM_MC_END();
6436 return VINF_SUCCESS;
6437
6438 case IEMMODE_64BIT:
6439 IEM_MC_BEGIN(3, 2);
6440 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6441 IEM_MC_ARG(uint64_t, u64Src, 1);
6442 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6444 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6445
6446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6447 if (pImpl->pfnLockedU16)
6448 IEMOP_HLP_DONE_DECODING();
6449 else
6450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6451 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6452 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6453 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6454 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6455 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6456 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6457 IEM_MC_FETCH_EFLAGS(EFlags);
6458
6459 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6460 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6462 else
6463 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6464 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6465
6466 IEM_MC_COMMIT_EFLAGS(EFlags);
6467 IEM_MC_ADVANCE_RIP();
6468 IEM_MC_END();
6469 return VINF_SUCCESS;
6470
6471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6472 }
6473 }
6474}
6475
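/*
 * The i16/i32/i64AddrAdj arithmetic above implements the signed bit offset
 * addressing of BT/BTS/BTR/BTC with a memory operand: the register operand
 * is an operand-sized *signed* bit index relative to the effective address,
 * so the address is displaced by whole operands (rounding towards minus
 * infinity, hence SAR) and the low bits select the bit within it.  Plain C
 * sketch of the 16-bit case (illustrative only):
 *
 *      int16_t const iBitOfs = (int16_t)u16Src;
 *      GCPtrEffDst += (iBitOfs >> 4) * 2;      // the SAR by 4, then SHL by 1.
 *      unsigned const iBit = iBitOfs & 0x0f;   // the AND with 0x0f above.
 */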
6476
6477/** Opcode 0x0f 0xa3. */
6478FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6479{
6480 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6481 IEMOP_HLP_MIN_386();
6482 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6483}
6484
6485
6486/**
6487 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6488 */
6489FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6490{
6491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6492 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6493
6494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6495 {
6496 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6498
6499 switch (pVCpu->iem.s.enmEffOpSize)
6500 {
6501 case IEMMODE_16BIT:
6502 IEM_MC_BEGIN(4, 0);
6503 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6504 IEM_MC_ARG(uint16_t, u16Src, 1);
6505 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6506 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6507
6508 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6509 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6510 IEM_MC_REF_EFLAGS(pEFlags);
6511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6512
6513 IEM_MC_ADVANCE_RIP();
6514 IEM_MC_END();
6515 return VINF_SUCCESS;
6516
6517 case IEMMODE_32BIT:
6518 IEM_MC_BEGIN(4, 0);
6519 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6520 IEM_MC_ARG(uint32_t, u32Src, 1);
6521 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6522 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6523
6524 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6525 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6526 IEM_MC_REF_EFLAGS(pEFlags);
6527 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6528
6529 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6530 IEM_MC_ADVANCE_RIP();
6531 IEM_MC_END();
6532 return VINF_SUCCESS;
6533
6534 case IEMMODE_64BIT:
6535 IEM_MC_BEGIN(4, 0);
6536 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6537 IEM_MC_ARG(uint64_t, u64Src, 1);
6538 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6539 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6540
6541 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6542 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6543 IEM_MC_REF_EFLAGS(pEFlags);
6544 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6545
6546 IEM_MC_ADVANCE_RIP();
6547 IEM_MC_END();
6548 return VINF_SUCCESS;
6549
6550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6551 }
6552 }
6553 else
6554 {
6555 switch (pVCpu->iem.s.enmEffOpSize)
6556 {
6557 case IEMMODE_16BIT:
6558 IEM_MC_BEGIN(4, 2);
6559 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6560 IEM_MC_ARG(uint16_t, u16Src, 1);
6561 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6562 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6564
6565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6566 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6567 IEM_MC_ASSIGN(cShiftArg, cShift);
6568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6569 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6570 IEM_MC_FETCH_EFLAGS(EFlags);
6571 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6572 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6573
6574 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6575 IEM_MC_COMMIT_EFLAGS(EFlags);
6576 IEM_MC_ADVANCE_RIP();
6577 IEM_MC_END();
6578 return VINF_SUCCESS;
6579
6580 case IEMMODE_32BIT:
6581 IEM_MC_BEGIN(4, 2);
6582 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6583 IEM_MC_ARG(uint32_t, u32Src, 1);
6584 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6585 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6587
6588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6589 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6590 IEM_MC_ASSIGN(cShiftArg, cShift);
6591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6592 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6593 IEM_MC_FETCH_EFLAGS(EFlags);
6594 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6595 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6596
6597 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6598 IEM_MC_COMMIT_EFLAGS(EFlags);
6599 IEM_MC_ADVANCE_RIP();
6600 IEM_MC_END();
6601 return VINF_SUCCESS;
6602
6603 case IEMMODE_64BIT:
6604 IEM_MC_BEGIN(4, 2);
6605 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6606 IEM_MC_ARG(uint64_t, u64Src, 1);
6607 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6608 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6610
6611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6612 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6613 IEM_MC_ASSIGN(cShiftArg, cShift);
6614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6615 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6616 IEM_MC_FETCH_EFLAGS(EFlags);
6617 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6618 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6619
6620 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6621 IEM_MC_COMMIT_EFLAGS(EFlags);
6622 IEM_MC_ADVANCE_RIP();
6623 IEM_MC_END();
6624 return VINF_SUCCESS;
6625
6626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6627 }
6628 }
6629}
6630
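/*
 * What pfnNormalUxx computes for SHLD, in plain C (SHRD is the mirror
 * image; an illustrative sketch with a made-up name).  The count is masked
 * first, and AF as well as OF for counts other than 1 are not
 * architecturally defined, which is what the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS above declares:
 *
 *      uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *      {
 *          cShift &= 31;
 *          if (!cShift)
 *              return uDst;        // count 0: destination and flags unchanged
 *          return (uDst << cShift) | (uSrc >> (32 - cShift));
 *      }
 */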
6631
6632/**
6633 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6634 */
6635FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6636{
6637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6638 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6639
6640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6641 {
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643
6644 switch (pVCpu->iem.s.enmEffOpSize)
6645 {
6646 case IEMMODE_16BIT:
6647 IEM_MC_BEGIN(4, 0);
6648 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6649 IEM_MC_ARG(uint16_t, u16Src, 1);
6650 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6651 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6652
6653 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6654 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6655 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6656 IEM_MC_REF_EFLAGS(pEFlags);
6657 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6658
6659 IEM_MC_ADVANCE_RIP();
6660 IEM_MC_END();
6661 return VINF_SUCCESS;
6662
6663 case IEMMODE_32BIT:
6664 IEM_MC_BEGIN(4, 0);
6665 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6666 IEM_MC_ARG(uint32_t, u32Src, 1);
6667 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6668 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6669
6670 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6671 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6672 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6673 IEM_MC_REF_EFLAGS(pEFlags);
6674 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6675
6676 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6677 IEM_MC_ADVANCE_RIP();
6678 IEM_MC_END();
6679 return VINF_SUCCESS;
6680
6681 case IEMMODE_64BIT:
6682 IEM_MC_BEGIN(4, 0);
6683 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6684 IEM_MC_ARG(uint64_t, u64Src, 1);
6685 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6686 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6687
6688 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6689 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6690 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6691 IEM_MC_REF_EFLAGS(pEFlags);
6692 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6693
6694 IEM_MC_ADVANCE_RIP();
6695 IEM_MC_END();
6696 return VINF_SUCCESS;
6697
6698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6699 }
6700 }
6701 else
6702 {
6703 switch (pVCpu->iem.s.enmEffOpSize)
6704 {
6705 case IEMMODE_16BIT:
6706 IEM_MC_BEGIN(4, 2);
6707 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6708 IEM_MC_ARG(uint16_t, u16Src, 1);
6709 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6710 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6712
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6716 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6717 IEM_MC_FETCH_EFLAGS(EFlags);
6718 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6719 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6720
6721 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6722 IEM_MC_COMMIT_EFLAGS(EFlags);
6723 IEM_MC_ADVANCE_RIP();
6724 IEM_MC_END();
6725 return VINF_SUCCESS;
6726
6727 case IEMMODE_32BIT:
6728 IEM_MC_BEGIN(4, 2);
6729 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6730 IEM_MC_ARG(uint32_t, u32Src, 1);
6731 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6732 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6738 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6739 IEM_MC_FETCH_EFLAGS(EFlags);
6740 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6741 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6742
6743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6744 IEM_MC_COMMIT_EFLAGS(EFlags);
6745 IEM_MC_ADVANCE_RIP();
6746 IEM_MC_END();
6747 return VINF_SUCCESS;
6748
6749 case IEMMODE_64BIT:
6750 IEM_MC_BEGIN(4, 2);
6751 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6752 IEM_MC_ARG(uint64_t, u64Src, 1);
6753 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6754 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6756
6757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6760 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6761 IEM_MC_FETCH_EFLAGS(EFlags);
6762 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6763 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6764
6765 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6766 IEM_MC_COMMIT_EFLAGS(EFlags);
6767 IEM_MC_ADVANCE_RIP();
6768 IEM_MC_END();
6769 return VINF_SUCCESS;
6770
6771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6772 }
6773 }
6774}
6775
6776
6777
6778/** Opcode 0x0f 0xa4. */
6779FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6780{
6781 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6782 IEMOP_HLP_MIN_386();
6783 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
6784}
6785
6786
6787/** Opcode 0x0f 0xa5. */
6788FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6789{
6790 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6791 IEMOP_HLP_MIN_386();
6792 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
6793}
6794
6795
6796/** Opcode 0x0f 0xa8. */
6797FNIEMOP_DEF(iemOp_push_gs)
6798{
6799 IEMOP_MNEMONIC(push_gs, "push gs");
6800 IEMOP_HLP_MIN_386();
6801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6802 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6803}
6804
6805
6806/** Opcode 0x0f 0xa9. */
6807FNIEMOP_DEF(iemOp_pop_gs)
6808{
6809 IEMOP_MNEMONIC(pop_gs, "pop gs");
6810 IEMOP_HLP_MIN_386();
6811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6812 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6813}
6814
6815
6816/** Opcode 0x0f 0xaa. */
6817FNIEMOP_DEF(iemOp_rsm)
6818{
6819 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6820 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6822 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6823}
6824
6825
6826
6827/** Opcode 0x0f 0xab. */
6828FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6829{
6830 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6831 IEMOP_HLP_MIN_386();
6832 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6833}
6834
6835
6836/** Opcode 0x0f 0xac. */
6837FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6838{
6839 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6840 IEMOP_HLP_MIN_386();
6841 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
6842}
6843
6844
6845/** Opcode 0x0f 0xad. */
6846FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6847{
6848 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6849 IEMOP_HLP_MIN_386();
6850 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
6851}
6852
6853
6854/** Opcode 0x0f 0xae mem/0. */
6855FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6856{
6857 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6858 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6859 return IEMOP_RAISE_INVALID_OPCODE();
6860
6861 IEM_MC_BEGIN(3, 1);
6862 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6863 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6864 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6867 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6868 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6869 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6870 IEM_MC_END();
6871 return VINF_SUCCESS;
6872}
6873
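/*
 * Note that iemCImpl_fxsave (and iemCImpl_fxrstor below) must also enforce
 * the 16 byte alignment of the 512 byte area; a misaligned operand yields
 * #GP(0).  Sketch of that check (illustrative):
 *
 *      if (GCPtrEff & 15)
 *          return iemRaiseGeneralProtectionFault0(pVCpu);
 */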
6874
6875/** Opcode 0x0f 0xae mem/1. */
6876FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6877{
6878 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6879 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6880 return IEMOP_RAISE_INVALID_OPCODE();
6881
6882 IEM_MC_BEGIN(3, 1);
6883 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6884 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6885 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6888 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6889 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6890 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6891 IEM_MC_END();
6892 return VINF_SUCCESS;
6893}
6894
6895
6896/**
6897 * @opmaps grp15
6898 * @opcode !11/2
6899 * @oppfx none
6900 * @opcpuid sse
6901 * @opgroup og_sse_mxcsrsm
6902 * @opxcpttype 5
6903 * @optest op1=0 -> mxcsr=0
6904 * @optest op1=0x2083 -> mxcsr=0x2083
6905 * @optest op1=0xfffffffe -> value.xcpt=0xd
6906 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6907 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6908 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6909 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6910 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6911 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6912 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6913 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6914 */
6915FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6916{
6917 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6918 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6919 return IEMOP_RAISE_INVALID_OPCODE();
6920
6921 IEM_MC_BEGIN(2, 0);
6922 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6923 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6926 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6927 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6928 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6929 IEM_MC_END();
6930 return VINF_SUCCESS;
6931}
6932
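/*
 * The value.xcpt=0xd row in the test list above comes from reserved MXCSR
 * bits: LDMXCSR raises #GP(0) if any bit outside the MXCSR mask reported
 * via FXSAVE (0xffbf on older CPUs) is set.  Sketch of the check that
 * iemCImpl_ldmxcsr has to perform (illustrative):
 *
 *      if (uNewMxCsr & ~fMxCsrMask)
 *          return iemRaiseGeneralProtectionFault0(pVCpu);
 */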
6933
6934/**
6935 * @opmaps grp15
6936 * @opcode !11/3
6937 * @oppfx none
6938 * @opcpuid sse
6939 * @opgroup og_sse_mxcsrsm
6940 * @opxcpttype 5
6941 * @optest mxcsr=0 -> op1=0
6942 * @optest mxcsr=0x2083 -> op1=0x2083
6943 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6944 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6945 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6946 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6947 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6948 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6949 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6950 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6951 */
6952FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6953{
6954 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6955 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6956 return IEMOP_RAISE_INVALID_OPCODE();
6957
6958 IEM_MC_BEGIN(2, 0);
6959 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6960 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6963 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6964 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6965 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6966 IEM_MC_END();
6967 return VINF_SUCCESS;
6968}
6969
6970
6971/**
6972 * @opmaps grp15
6973 * @opcode !11/4
6974 * @oppfx none
6975 * @opcpuid xsave
6976 * @opgroup og_system
6977 * @opxcpttype none
6978 */
6979FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6980{
6981 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6982 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6983 return IEMOP_RAISE_INVALID_OPCODE();
6984
6985 IEM_MC_BEGIN(3, 0);
6986 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6987 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6988 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6991 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6992 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6993 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6994 IEM_MC_END();
6995 return VINF_SUCCESS;
6996}
6997
6998
6999/**
7000 * @opmaps grp15
7001 * @opcode !11/5
7002 * @oppfx none
7003 * @opcpuid xsave
7004 * @opgroup og_system
7005 * @opxcpttype none
7006 */
7007FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7008{
7009 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7010 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7011 return IEMOP_RAISE_INVALID_OPCODE();
7012
7013 IEM_MC_BEGIN(3, 0);
7014 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7015 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7016 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7019 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7020 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7021 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7022 IEM_MC_END();
7023 return VINF_SUCCESS;
7024}
7025
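/*
 * Both xsave and xrstor operate on the requested-feature bitmap
 * RFBM = (EDX:EAX) & XCR0; state components whose RFBM bit is clear are
 * simply skipped.  Illustrative sketch of that intersection:
 *
 *      uint64_t const fRfbm = ((uint64_t)uEdx << 32 | uEax) & uGuestXcr0;
 *      if (fRfbm & XSAVE_C_SSE)    // bit 1: the XMM registers and MXCSR
 *          ... save or restore the SSE component ...
 */
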
7026/** Opcode 0x0f 0xae mem/6. */
7027FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7028
7029/**
7030 * @opmaps grp15
7031 * @opcode !11/7
7032 * @oppfx none
7033 * @opcpuid clfsh
7034 * @opgroup og_cachectl
7035 * @optest op1=1 ->
7036 */
7037FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7038{
7039 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7040 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7041 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7042
7043 IEM_MC_BEGIN(2, 0);
7044 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7045 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7048 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7049 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7050 IEM_MC_END();
7051 return VINF_SUCCESS;
7052}
7053
7054/**
7055 * @opmaps grp15
7056 * @opcode !11/7
7057 * @oppfx 0x66
7058 * @opcpuid clflushopt
7059 * @opgroup og_cachectl
7060 * @optest op1=1 ->
7061 */
7062FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7063{
7064 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7065 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7066 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7067
7068 IEM_MC_BEGIN(2, 0);
7069 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7070 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7073 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7074 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7075 IEM_MC_END();
7076 return VINF_SUCCESS;
7077}
7078
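/*
 * clflush and clflushopt share the 0f ae /7 memory encoding and the same
 * iemCImpl_clflush_clflushopt worker; they are distinguished purely by the
 * 066h prefix (see g_apfnGroup15MemReg below) and differ architecturally
 * only in memory ordering, clflushopt being the more weakly ordered one.
 */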
7079
7080/** Opcode 0x0f 0xae 11b/5. */
7081FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7082{
7083 RT_NOREF_PV(bRm);
7084 IEMOP_MNEMONIC(lfence, "lfence");
7085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7086 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7087 return IEMOP_RAISE_INVALID_OPCODE();
7088
7089 IEM_MC_BEGIN(0, 0);
7090#ifndef RT_ARCH_ARM64
7091 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7092#endif
7093 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7094#ifndef RT_ARCH_ARM64
7095 else
7096 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7097#endif
7098 IEM_MC_ADVANCE_RIP();
7099 IEM_MC_END();
7100 return VINF_SUCCESS;
7101}
7102
7103
7104/** Opcode 0x0f 0xae 11b/6. */
7105FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7106{
7107 RT_NOREF_PV(bRm);
7108 IEMOP_MNEMONIC(mfence, "mfence");
7109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7110 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7111 return IEMOP_RAISE_INVALID_OPCODE();
7112
7113 IEM_MC_BEGIN(0, 0);
7114#ifndef RT_ARCH_ARM64
7115 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7116#endif
7117 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7118#ifndef RT_ARCH_ARM64
7119 else
7120 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7121#endif
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 return VINF_SUCCESS;
7125}
7126
7127
7128/** Opcode 0x0f 0xae 11b/7. */
7129FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7130{
7131 RT_NOREF_PV(bRm);
7132 IEMOP_MNEMONIC(sfence, "sfence");
7133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7134 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7135 return IEMOP_RAISE_INVALID_OPCODE();
7136
7137 IEM_MC_BEGIN(0, 0);
7138#ifndef RT_ARCH_ARM64
7139 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7140#endif
7141 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7142#ifndef RT_ARCH_ARM64
7143 else
7144 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7145#endif
7146 IEM_MC_ADVANCE_RIP();
7147 IEM_MC_END();
7148 return VINF_SUCCESS;
7149}
7150
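/*
 * On hosts without SSE2 the three fences above all fall back to
 * iemAImpl_alt_mem_fence, which uses the classic pre-SSE2 idiom of a
 * LOCKed read-modify-write to get a full barrier, roughly:
 *
 *      lock add dword [esp], 0     ; orders all earlier loads and stores
 *
 * (Illustrative; a LOCKed operation is a full fence on x86, which is more
 * than lfence/sfence strictly require.)
 */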
7151
7152/** Opcode 0xf3 0x0f 0xae 11b/0. */
7153FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7154{
7155 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7157 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7158 {
7159 IEM_MC_BEGIN(1, 0);
7160 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7161 IEM_MC_ARG(uint64_t, u64Dst, 0);
7162 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7163 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7164 IEM_MC_ADVANCE_RIP();
7165 IEM_MC_END();
7166 }
7167 else
7168 {
7169 IEM_MC_BEGIN(1, 0);
7170 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7171 IEM_MC_ARG(uint32_t, u32Dst, 0);
7172 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7173 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7174 IEM_MC_ADVANCE_RIP();
7175 IEM_MC_END();
7176 }
7177 return VINF_SUCCESS;
7178}
7179
7180
7181/** Opcode 0xf3 0x0f 0xae 11b/1. */
7182FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7183{
7184 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7186 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7187 {
7188 IEM_MC_BEGIN(1, 0);
7189 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7190 IEM_MC_ARG(uint64_t, u64Dst, 0);
7191 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7192 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7193 IEM_MC_ADVANCE_RIP();
7194 IEM_MC_END();
7195 }
7196 else
7197 {
7198 IEM_MC_BEGIN(1, 0);
7199 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7200 IEM_MC_ARG(uint32_t, u32Dst, 0);
7201 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7202 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7203 IEM_MC_ADVANCE_RIP();
7204 IEM_MC_END();
7205 }
7206 return VINF_SUCCESS;
7207}
7208
7209
7210/** Opcode 0xf3 0x0f 0xae 11b/2. */
7211FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7212{
7213 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7216 {
7217 IEM_MC_BEGIN(1, 0);
7218 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7219 IEM_MC_ARG(uint64_t, u64Dst, 0);
7220 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7221 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7222 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7223 IEM_MC_ADVANCE_RIP();
7224 IEM_MC_END();
7225 }
7226 else
7227 {
7228 IEM_MC_BEGIN(1, 0);
7229 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7230 IEM_MC_ARG(uint32_t, u32Dst, 0);
7231 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7232 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7233 IEM_MC_ADVANCE_RIP();
7234 IEM_MC_END();
7235 }
7236 return VINF_SUCCESS;
7237}
7238
7239
7240/** Opcode 0xf3 0x0f 0xae 11b/3. */
7241FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7242{
7243 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7245 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7246 {
7247 IEM_MC_BEGIN(1, 0);
7248 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7249 IEM_MC_ARG(uint64_t, u64Dst, 0);
7250 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7251 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7252 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7253 IEM_MC_ADVANCE_RIP();
7254 IEM_MC_END();
7255 }
7256 else
7257 {
7258 IEM_MC_BEGIN(1, 0);
7259 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7260 IEM_MC_ARG(uint32_t, u32Dst, 0);
7261 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7262 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7263 IEM_MC_ADVANCE_RIP();
7264 IEM_MC_END();
7265 }
7266 return VINF_SUCCESS;
7267}
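
/*
 * The four FSGSBASE decoders above handle the 0xf3-prefixed register forms
 * only; IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is expected to raise #UD outside
 * 64-bit mode or when CR4.FSGSBASE is clear, and the write forms #GP(0) on
 * non-canonical addresses. Illustrative guest usage via the standard
 * compiler intrinsics (not from this file):
 *
 * @code
 *      uint64_t uFsBase = _readfsbase_u64();   // RDFSBASE
 *      _writegsbase_u64(uNewGsBase);           // WRGSBASE
 * @endcode
 */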
7268
7269
7270/**
7271 * Group 15 jump table for register variant.
7272 */
7273IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7274{ /* pfx: none, 066h, 0f3h, 0f2h */
7275 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7276 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7277 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7278 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7279 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7280 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7281 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7282 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7283};
7284AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7285
7286
7287/**
7288 * Group 15 jump table for memory variant.
7289 */
7290IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7291{ /* pfx: none, 066h, 0f3h, 0f2h */
7292 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7293 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7294 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7295 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7296 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7297 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7298 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7299 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7300};
7301AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7302
7303
7304/** Opcode 0x0f 0xae. */
7305FNIEMOP_DEF(iemOp_Grp15)
7306{
7307 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7310 /* register, register */
7311 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7312 + pVCpu->iem.s.idxPrefix], bRm);
7313 /* memory, register */
7314 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7315 + pVCpu->iem.s.idxPrefix], bRm);
7316}
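
/*
 * Worked dispatch example: the tables are indexed by /reg * 4 + idxPrefix
 * (prefix column order: none, 066h, 0f3h, 0f2h), so
 *
 * @code
 *      f3 0f ae c0 (mod=11b, /0) -> g_apfnGroup15RegReg[0*4 + 2] = rdfsbase
 *         0f ae 38 (mod=00b, /7) -> g_apfnGroup15MemReg[7*4 + 0] = clflush
 * @endcode
 */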
7317
7318
7319/** Opcode 0x0f 0xaf. */
7320FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7321{
7322 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7323 IEMOP_HLP_MIN_386();
7324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7325 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
7326}
7327
7328
7329/** Opcode 0x0f 0xb0. */
7330FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7331{
7332 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7333 IEMOP_HLP_MIN_486();
7334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7335
7336 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7337 {
7338 IEMOP_HLP_DONE_DECODING();
7339 IEM_MC_BEGIN(4, 0);
7340 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7341 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7342 IEM_MC_ARG(uint8_t, u8Src, 2);
7343 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7344
7345 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7346 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7347 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7348 IEM_MC_REF_EFLAGS(pEFlags);
7349 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7350 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7351 else
7352 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7353
7354 IEM_MC_ADVANCE_RIP();
7355 IEM_MC_END();
7356 }
7357 else
7358 {
7359 IEM_MC_BEGIN(4, 3);
7360 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7361 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7362 IEM_MC_ARG(uint8_t, u8Src, 2);
7363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7365 IEM_MC_LOCAL(uint8_t, u8Al);
7366
7367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7368 IEMOP_HLP_DONE_DECODING();
7369 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7370 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7371 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7372 IEM_MC_FETCH_EFLAGS(EFlags);
7373 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7374 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7376 else
7377 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7378
7379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7380 IEM_MC_COMMIT_EFLAGS(EFlags);
7381 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7382 IEM_MC_ADVANCE_RIP();
7383 IEM_MC_END();
7384 }
7385 return VINF_SUCCESS;
7386}
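
/*
 * CMPXCHG semantics in rough C (a sketch for reference, not the
 * authoritative definition; CMP() here stands for the flag computation of
 * a CMP instruction):
 *
 * @code
 *      fEFlags = CMP(al, *pu8Dst);     // CF/OF/SF/AF/PF/ZF as for CMP
 *      if (al == *pu8Dst)              // ZF=1
 *          *pu8Dst = u8Src;            //   store the source operand
 *      else                            // ZF=0
 *          al = *pu8Dst;               //   load the destination into AL
 * @endcode
 */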
7387
7388/** Opcode 0x0f 0xb1. */
7389FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7390{
7391 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7392 IEMOP_HLP_MIN_486();
7393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7394
7395 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7396 {
7397 IEMOP_HLP_DONE_DECODING();
7398 switch (pVCpu->iem.s.enmEffOpSize)
7399 {
7400 case IEMMODE_16BIT:
7401 IEM_MC_BEGIN(4, 0);
7402 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7403 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7404 IEM_MC_ARG(uint16_t, u16Src, 2);
7405 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7406
7407 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7408 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7409 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7410 IEM_MC_REF_EFLAGS(pEFlags);
7411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7412 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7413 else
7414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7415
7416 IEM_MC_ADVANCE_RIP();
7417 IEM_MC_END();
7418 return VINF_SUCCESS;
7419
7420 case IEMMODE_32BIT:
7421 IEM_MC_BEGIN(4, 0);
7422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7423 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7424 IEM_MC_ARG(uint32_t, u32Src, 2);
7425 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7426
7427 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7428 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7429 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7430 IEM_MC_REF_EFLAGS(pEFlags);
7431 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7432 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7433 else
7434 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7435
7436 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7437 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 case IEMMODE_64BIT:
7443 IEM_MC_BEGIN(4, 0);
7444 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7445 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7446#ifdef RT_ARCH_X86
7447 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7448#else
7449 IEM_MC_ARG(uint64_t, u64Src, 2);
7450#endif
7451 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7452
7453 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7454 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7455 IEM_MC_REF_EFLAGS(pEFlags);
7456#ifdef RT_ARCH_X86
7457 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7458 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7459 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7460 else
7461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7462#else
7463 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7464 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7465 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7466 else
7467 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7468#endif
7469
7470 IEM_MC_ADVANCE_RIP();
7471 IEM_MC_END();
7472 return VINF_SUCCESS;
7473
7474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7475 }
7476 }
7477 else
7478 {
7479 switch (pVCpu->iem.s.enmEffOpSize)
7480 {
7481 case IEMMODE_16BIT:
7482 IEM_MC_BEGIN(4, 3);
7483 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7484 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7485 IEM_MC_ARG(uint16_t, u16Src, 2);
7486 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7488 IEM_MC_LOCAL(uint16_t, u16Ax);
7489
7490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7491 IEMOP_HLP_DONE_DECODING();
7492 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7493 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7494 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7495 IEM_MC_FETCH_EFLAGS(EFlags);
7496 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7497 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7498 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7499 else
7500 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7501
7502 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7503 IEM_MC_COMMIT_EFLAGS(EFlags);
7504 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7505 IEM_MC_ADVANCE_RIP();
7506 IEM_MC_END();
7507 return VINF_SUCCESS;
7508
7509 case IEMMODE_32BIT:
7510 IEM_MC_BEGIN(4, 3);
7511 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7512 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7513 IEM_MC_ARG(uint32_t, u32Src, 2);
7514 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7516 IEM_MC_LOCAL(uint32_t, u32Eax);
7517
7518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7519 IEMOP_HLP_DONE_DECODING();
7520 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7521 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7522 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7523 IEM_MC_FETCH_EFLAGS(EFlags);
7524 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7525 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7526 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7527 else
7528 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7529
7530 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7531 IEM_MC_COMMIT_EFLAGS(EFlags);
7532 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7533 IEM_MC_ADVANCE_RIP();
7534 IEM_MC_END();
7535 return VINF_SUCCESS;
7536
7537 case IEMMODE_64BIT:
7538 IEM_MC_BEGIN(4, 3);
7539 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7540 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7541#ifdef RT_ARCH_X86
7542 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7543#else
7544 IEM_MC_ARG(uint64_t, u64Src, 2);
7545#endif
7546 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7548 IEM_MC_LOCAL(uint64_t, u64Rax);
7549
7550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7551 IEMOP_HLP_DONE_DECODING();
7552 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7553 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7554 IEM_MC_FETCH_EFLAGS(EFlags);
7555 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7556#ifdef RT_ARCH_X86
7557 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7558 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7559 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7560 else
7561 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7562#else
7563 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7564 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7566 else
7567 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7568#endif
7569
7570 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7571 IEM_MC_COMMIT_EFLAGS(EFlags);
7572 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7573 IEM_MC_ADVANCE_RIP();
7574 IEM_MC_END();
7575 return VINF_SUCCESS;
7576
7577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7578 }
7579 }
7580}
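
/*
 * Note the RT_ARCH_X86 special casing in the 64-bit paths above: on 32-bit
 * hosts the 64-bit source operand is passed to the assembly helper by
 * reference instead of by value, presumably because the 32-bit calling
 * convention cannot hand a 64-bit argument to these helpers in a register.
 */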
7581
7582
7583FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7584{
7585 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7586 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7587
7588 switch (pVCpu->iem.s.enmEffOpSize)
7589 {
7590 case IEMMODE_16BIT:
7591 IEM_MC_BEGIN(5, 1);
7592 IEM_MC_ARG(uint16_t, uSel, 0);
7593 IEM_MC_ARG(uint16_t, offSeg, 1);
7594 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7595 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7596 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7597 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7600 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7601 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7602 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7603 IEM_MC_END();
7604 return VINF_SUCCESS;
7605
7606 case IEMMODE_32BIT:
7607 IEM_MC_BEGIN(5, 1);
7608 IEM_MC_ARG(uint16_t, uSel, 0);
7609 IEM_MC_ARG(uint32_t, offSeg, 1);
7610 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7611 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7612 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7613 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7616 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7617 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7618 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7619 IEM_MC_END();
7620 return VINF_SUCCESS;
7621
7622 case IEMMODE_64BIT:
7623 IEM_MC_BEGIN(5, 1);
7624 IEM_MC_ARG(uint16_t, uSel, 0);
7625 IEM_MC_ARG(uint64_t, offSeg, 1);
7626 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7627 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7628 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7629 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7632 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
7633 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7634 else
7635 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7636 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7637 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7638 IEM_MC_END();
7639 return VINF_SUCCESS;
7640
7641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7642 }
7643}
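
/*
 * The far-pointer memory operand (Mp) decoded above is laid out offset
 * first, immediately followed by the 16-bit selector, which is what the
 * displaced U16 fetches read:
 *
 * @code
 *      o16: [m+0] uint16_t offSeg, [m+2] uint16_t uSel
 *      o32: [m+0] uint32_t offSeg, [m+4] uint16_t uSel
 *      o64: [m+0] uint64_t offSeg, [m+8] uint16_t uSel
 * @endcode
 */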
7644
7645
7646/** Opcode 0x0f 0xb2. */
7647FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7648{
7649 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7650 IEMOP_HLP_MIN_386();
7651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7653 return IEMOP_RAISE_INVALID_OPCODE();
7654 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7655}
7656
7657
7658/** Opcode 0x0f 0xb3. */
7659FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7660{
7661 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7662 IEMOP_HLP_MIN_386();
7663 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7664}
7665
7666
7667/** Opcode 0x0f 0xb4. */
7668FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7669{
7670 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7671 IEMOP_HLP_MIN_386();
7672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7674 return IEMOP_RAISE_INVALID_OPCODE();
7675 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7676}
7677
7678
7679/** Opcode 0x0f 0xb5. */
7680FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7681{
7682 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7683 IEMOP_HLP_MIN_386();
7684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7686 return IEMOP_RAISE_INVALID_OPCODE();
7687 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7688}
7689
7690
7691/** Opcode 0x0f 0xb6. */
7692FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7693{
7694 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7695 IEMOP_HLP_MIN_386();
7696
7697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7698
7699 /*
7700 * If rm is denoting a register, no more instruction bytes.
7701 */
7702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7703 {
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705 switch (pVCpu->iem.s.enmEffOpSize)
7706 {
7707 case IEMMODE_16BIT:
7708 IEM_MC_BEGIN(0, 1);
7709 IEM_MC_LOCAL(uint16_t, u16Value);
7710 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7712 IEM_MC_ADVANCE_RIP();
7713 IEM_MC_END();
7714 return VINF_SUCCESS;
7715
7716 case IEMMODE_32BIT:
7717 IEM_MC_BEGIN(0, 1);
7718 IEM_MC_LOCAL(uint32_t, u32Value);
7719 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7720 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7721 IEM_MC_ADVANCE_RIP();
7722 IEM_MC_END();
7723 return VINF_SUCCESS;
7724
7725 case IEMMODE_64BIT:
7726 IEM_MC_BEGIN(0, 1);
7727 IEM_MC_LOCAL(uint64_t, u64Value);
7728 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7729 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7730 IEM_MC_ADVANCE_RIP();
7731 IEM_MC_END();
7732 return VINF_SUCCESS;
7733
7734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7735 }
7736 }
7737 else
7738 {
7739 /*
7740 * We're loading a register from memory.
7741 */
7742 switch (pVCpu->iem.s.enmEffOpSize)
7743 {
7744 case IEMMODE_16BIT:
7745 IEM_MC_BEGIN(0, 2);
7746 IEM_MC_LOCAL(uint16_t, u16Value);
7747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7751 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7752 IEM_MC_ADVANCE_RIP();
7753 IEM_MC_END();
7754 return VINF_SUCCESS;
7755
7756 case IEMMODE_32BIT:
7757 IEM_MC_BEGIN(0, 2);
7758 IEM_MC_LOCAL(uint32_t, u32Value);
7759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7762 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7763 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7764 IEM_MC_ADVANCE_RIP();
7765 IEM_MC_END();
7766 return VINF_SUCCESS;
7767
7768 case IEMMODE_64BIT:
7769 IEM_MC_BEGIN(0, 2);
7770 IEM_MC_LOCAL(uint64_t, u64Value);
7771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7774 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7775 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7776 IEM_MC_ADVANCE_RIP();
7777 IEM_MC_END();
7778 return VINF_SUCCESS;
7779
7780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7781 }
7782 }
7783}
7784
7785
7786/** Opcode 0x0f 0xb7. */
7787FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7788{
7789 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7790 IEMOP_HLP_MIN_386();
7791
7792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7793
7794 /** @todo Not entirely sure how the operand size prefix is handled here,
7795 * assuming that it will be ignored. Would be nice to have a few
7796 * tests for this. */
7797 /*
7798 * If rm is denoting a register, no more instruction bytes.
7799 */
7800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7801 {
7802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7803 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7804 {
7805 IEM_MC_BEGIN(0, 1);
7806 IEM_MC_LOCAL(uint32_t, u32Value);
7807 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7808 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7809 IEM_MC_ADVANCE_RIP();
7810 IEM_MC_END();
7811 }
7812 else
7813 {
7814 IEM_MC_BEGIN(0, 1);
7815 IEM_MC_LOCAL(uint64_t, u64Value);
7816 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7817 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7818 IEM_MC_ADVANCE_RIP();
7819 IEM_MC_END();
7820 }
7821 }
7822 else
7823 {
7824 /*
7825 * We're loading a register from memory.
7826 */
7827 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7828 {
7829 IEM_MC_BEGIN(0, 2);
7830 IEM_MC_LOCAL(uint32_t, u32Value);
7831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7835 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7836 IEM_MC_ADVANCE_RIP();
7837 IEM_MC_END();
7838 }
7839 else
7840 {
7841 IEM_MC_BEGIN(0, 2);
7842 IEM_MC_LOCAL(uint64_t, u64Value);
7843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7847 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7848 IEM_MC_ADVANCE_RIP();
7849 IEM_MC_END();
7850 }
7851 }
7852 return VINF_SUCCESS;
7853}
7854
7855
7856/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7857FNIEMOP_UD_STUB(iemOp_jmpe);
7858
7859
7860/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7861FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
7862{
7863 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
7864 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
7865 return iemOp_InvalidNeedRM(pVCpu);
7866#ifndef TST_IEM_CHECK_MC
7867# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
7868 static const IEMOPBINSIZES s_Native =
7869 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
7870# endif
7871 static const IEMOPBINSIZES s_Fallback =
7872 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
7873#endif
7874 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
7875}
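
/*
 * The _fallback helpers are used when the host CPU lacks POPCNT. A classic
 * portable bit count looks roughly like this (a sketch only; the actual
 * iemAImpl_popcnt_u32_fallback implementation may differ):
 *
 * @code
 *      uint32_t cBits = 0;
 *      while (uSrc)
 *      {
 *          uSrc &= uSrc - 1;   // clears the lowest set bit
 *          cBits++;
 *      }
 * @endcode
 *
 * POPCNT clears OF/SF/AF/CF/PF and sets ZF only when the source is zero.
 */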
7876
7877
7878/**
7879 * @opcode 0xb9
7880 * @opinvalid intel-modrm
7881 * @optest ->
7882 */
7883FNIEMOP_DEF(iemOp_Grp10)
7884{
7885 /*
7886 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7887 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7888 */
7889 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7890 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7891 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7892}
7893
7894
7895/** Opcode 0x0f 0xba. */
7896FNIEMOP_DEF(iemOp_Grp8)
7897{
7898 IEMOP_HLP_MIN_386();
7899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7900 PCIEMOPBINSIZES pImpl;
7901 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7902 {
7903 case 0: case 1: case 2: case 3:
7904 /* Both AMD and Intel want full modr/m decoding and imm8. */
7905 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7906 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7907 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7908 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7909 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7910 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7911 }
7912 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7913
7914 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7915 {
7916 /* register destination. */
7917 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7919
7920 switch (pVCpu->iem.s.enmEffOpSize)
7921 {
7922 case IEMMODE_16BIT:
7923 IEM_MC_BEGIN(3, 0);
7924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7925 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7926 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7927
7928 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7929 IEM_MC_REF_EFLAGS(pEFlags);
7930 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7931
7932 IEM_MC_ADVANCE_RIP();
7933 IEM_MC_END();
7934 return VINF_SUCCESS;
7935
7936 case IEMMODE_32BIT:
7937 IEM_MC_BEGIN(3, 0);
7938 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7939 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7941
7942 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7943 IEM_MC_REF_EFLAGS(pEFlags);
7944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7945
7946 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 return VINF_SUCCESS;
7950
7951 case IEMMODE_64BIT:
7952 IEM_MC_BEGIN(3, 0);
7953 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7954 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7955 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7956
7957 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7958 IEM_MC_REF_EFLAGS(pEFlags);
7959 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7960
7961 IEM_MC_ADVANCE_RIP();
7962 IEM_MC_END();
7963 return VINF_SUCCESS;
7964
7965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7966 }
7967 }
7968 else
7969 {
7970 /* memory destination. */
7971
7972 uint32_t fAccess;
7973 if (pImpl->pfnLockedU16)
7974 fAccess = IEM_ACCESS_DATA_RW;
7975 else /* BT */
7976 fAccess = IEM_ACCESS_DATA_R;
7977
7978 /** @todo test negative bit offsets! */
7979 switch (pVCpu->iem.s.enmEffOpSize)
7980 {
7981 case IEMMODE_16BIT:
7982 IEM_MC_BEGIN(3, 1);
7983 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7984 IEM_MC_ARG(uint16_t, u16Src, 1);
7985 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7987
7988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7989 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7990 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7991 if (pImpl->pfnLockedU16)
7992 IEMOP_HLP_DONE_DECODING();
7993 else
7994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7995 IEM_MC_FETCH_EFLAGS(EFlags);
7996 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7997 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7998 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7999 else
8000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8001 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8002
8003 IEM_MC_COMMIT_EFLAGS(EFlags);
8004 IEM_MC_ADVANCE_RIP();
8005 IEM_MC_END();
8006 return VINF_SUCCESS;
8007
8008 case IEMMODE_32BIT:
8009 IEM_MC_BEGIN(3, 1);
8010 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8011 IEM_MC_ARG(uint32_t, u32Src, 1);
8012 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8014
8015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8016 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8017 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8018 if (pImpl->pfnLockedU16)
8019 IEMOP_HLP_DONE_DECODING();
8020 else
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022 IEM_MC_FETCH_EFLAGS(EFlags);
8023 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8024 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8026 else
8027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8028 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8029
8030 IEM_MC_COMMIT_EFLAGS(EFlags);
8031 IEM_MC_ADVANCE_RIP();
8032 IEM_MC_END();
8033 return VINF_SUCCESS;
8034
8035 case IEMMODE_64BIT:
8036 IEM_MC_BEGIN(3, 1);
8037 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8038 IEM_MC_ARG(uint64_t, u64Src, 1);
8039 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8041
8042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8043 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8044 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8045 if (pImpl->pfnLockedU16)
8046 IEMOP_HLP_DONE_DECODING();
8047 else
8048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8049 IEM_MC_FETCH_EFLAGS(EFlags);
8050 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8051 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8052 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8053 else
8054 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8055 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8056
8057 IEM_MC_COMMIT_EFLAGS(EFlags);
8058 IEM_MC_ADVANCE_RIP();
8059 IEM_MC_END();
8060 return VINF_SUCCESS;
8061
8062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8063 }
8064 }
8065}
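
/*
 * For these immediate forms the bit offset is masked to the operand size
 * (the u8Bit & 0x0f/0x1f/0x3f above), so the access never leaves the
 * addressed word/dword/qword; e.g. `bt word [mem], 17` tests bit
 * 17 & 0x0f = bit 1 of [mem].
 */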
8066
8067
8068/** Opcode 0x0f 0xbb. */
8069FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8070{
8071 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8072 IEMOP_HLP_MIN_386();
8073 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8074}
8075
8076
8077/**
8078 * Common worker for BSF and BSR instructions.
8079 *
8080 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8081 * the destination register, which means that for 32-bit operations the high
8082 * bits must be left alone.
8083 *
8084 * @param pImpl Pointer to the instruction implementation (assembly).
8085 */
8086FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8087{
8088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8089
8090 /*
8091 * If rm is denoting a register, no more instruction bytes.
8092 */
8093 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8094 {
8095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096 switch (pVCpu->iem.s.enmEffOpSize)
8097 {
8098 case IEMMODE_16BIT:
8099 IEM_MC_BEGIN(3, 0);
8100 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8101 IEM_MC_ARG(uint16_t, u16Src, 1);
8102 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8103
8104 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8105 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8106 IEM_MC_REF_EFLAGS(pEFlags);
8107 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8108
8109 IEM_MC_ADVANCE_RIP();
8110 IEM_MC_END();
8111 break;
8112
8113 case IEMMODE_32BIT:
8114 IEM_MC_BEGIN(3, 0);
8115 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8116 IEM_MC_ARG(uint32_t, u32Src, 1);
8117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8118
8119 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8120 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8121 IEM_MC_REF_EFLAGS(pEFlags);
8122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8123 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8124 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8125 IEM_MC_ENDIF();
8126 IEM_MC_ADVANCE_RIP();
8127 IEM_MC_END();
8128 break;
8129
8130 case IEMMODE_64BIT:
8131 IEM_MC_BEGIN(3, 0);
8132 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8133 IEM_MC_ARG(uint64_t, u64Src, 1);
8134 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8135
8136 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8137 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8138 IEM_MC_REF_EFLAGS(pEFlags);
8139 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8140
8141 IEM_MC_ADVANCE_RIP();
8142 IEM_MC_END();
8143 break;
8144 }
8145 }
8146 else
8147 {
8148 /*
8149 * We're accessing memory.
8150 */
8151 switch (pVCpu->iem.s.enmEffOpSize)
8152 {
8153 case IEMMODE_16BIT:
8154 IEM_MC_BEGIN(3, 1);
8155 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8156 IEM_MC_ARG(uint16_t, u16Src, 1);
8157 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8159
8160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8162 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8163 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8164 IEM_MC_REF_EFLAGS(pEFlags);
8165 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8166
8167 IEM_MC_ADVANCE_RIP();
8168 IEM_MC_END();
8169 break;
8170
8171 case IEMMODE_32BIT:
8172 IEM_MC_BEGIN(3, 1);
8173 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8174 IEM_MC_ARG(uint32_t, u32Src, 1);
8175 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8177
8178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8180 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8181 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8182 IEM_MC_REF_EFLAGS(pEFlags);
8183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8184
8185 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8186 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8187 IEM_MC_ENDIF();
8188 IEM_MC_ADVANCE_RIP();
8189 IEM_MC_END();
8190 break;
8191
8192 case IEMMODE_64BIT:
8193 IEM_MC_BEGIN(3, 1);
8194 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8195 IEM_MC_ARG(uint64_t, u64Src, 1);
8196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8198
8199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8201 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8202 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8203 IEM_MC_REF_EFLAGS(pEFlags);
8204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8205
8206 IEM_MC_ADVANCE_RIP();
8207 IEM_MC_END();
8208 break;
8209 }
8210 }
8211 return VINF_SUCCESS;
8212}
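
/*
 * Why the high-dword clearing above is conditional on ZF: with a zero
 * source BSF/BSR set ZF and leave the destination untouched (AMD documents
 * this; Intel documents the destination as undefined but behaves the same
 * in practice), so the register must not be dirtied in that case. Rough
 * sketch of the helper contract:
 *
 * @code
 *      if (uSrc)
 *      {
 *          *puDst   = uIndexOfFirstOrLastSetBit;
 *          fEFlags &= ~X86_EFL_ZF;     // dest written, high dword cleared
 *      }
 *      else
 *          fEFlags |= X86_EFL_ZF;      // dest left untouched
 * @endcode
 */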
8213
8214
8215/** Opcode 0x0f 0xbc. */
8216FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
8217{
8218 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
8219 IEMOP_HLP_MIN_386();
8220 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8221 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
8222}
8223
8224
8225/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
8226FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
8227{
8228 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8229 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
8230 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8231
8232#ifndef TST_IEM_CHECK_MC
8233 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
8234 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
8235 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
8236 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
8237 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
8238 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
8239 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
8240 {
8241 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
8242 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
8243 };
8244#endif
8245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8246 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8247 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8248}
8249
8250
8251/** Opcode 0x0f 0xbd. */
8252FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
8253{
8254 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8255 IEMOP_HLP_MIN_386();
8256 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8257 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
8258}
8259
8260
8261/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8262FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
8263{
8264 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8265 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
8266 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8267
8268#ifndef TST_IEM_CHECK_MC
8269 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
8270 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
8271 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
8272 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
8273 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
8274 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
8275 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
8276 {
8277 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
8278 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
8279 };
8280#endif
8281 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8282 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8283 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8284}
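
/*
 * TZCNT/LZCNT differ from BSF/BSR for a zero source: they store the operand
 * size (16/32/64) in the destination and set CF, instead of leaving the
 * destination untouched; ZF is set when the result is zero. That is why
 * vendor-specific eflags variants are selected above, and why both decoders
 * quietly become BSF/BSR when the guest lacks BMI1.
 */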
8285
8286
8287
8288/** Opcode 0x0f 0xbe. */
8289FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8290{
8291 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8292 IEMOP_HLP_MIN_386();
8293
8294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8295
8296 /*
8297 * If rm is denoting a register, no more instruction bytes.
8298 */
8299 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8300 {
8301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8302 switch (pVCpu->iem.s.enmEffOpSize)
8303 {
8304 case IEMMODE_16BIT:
8305 IEM_MC_BEGIN(0, 1);
8306 IEM_MC_LOCAL(uint16_t, u16Value);
8307 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8308 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8309 IEM_MC_ADVANCE_RIP();
8310 IEM_MC_END();
8311 return VINF_SUCCESS;
8312
8313 case IEMMODE_32BIT:
8314 IEM_MC_BEGIN(0, 1);
8315 IEM_MC_LOCAL(uint32_t, u32Value);
8316 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8317 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8318 IEM_MC_ADVANCE_RIP();
8319 IEM_MC_END();
8320 return VINF_SUCCESS;
8321
8322 case IEMMODE_64BIT:
8323 IEM_MC_BEGIN(0, 1);
8324 IEM_MC_LOCAL(uint64_t, u64Value);
8325 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8326 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8327 IEM_MC_ADVANCE_RIP();
8328 IEM_MC_END();
8329 return VINF_SUCCESS;
8330
8331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8332 }
8333 }
8334 else
8335 {
8336 /*
8337 * We're loading a register from memory.
8338 */
8339 switch (pVCpu->iem.s.enmEffOpSize)
8340 {
8341 case IEMMODE_16BIT:
8342 IEM_MC_BEGIN(0, 2);
8343 IEM_MC_LOCAL(uint16_t, u16Value);
8344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8348 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8349 IEM_MC_ADVANCE_RIP();
8350 IEM_MC_END();
8351 return VINF_SUCCESS;
8352
8353 case IEMMODE_32BIT:
8354 IEM_MC_BEGIN(0, 2);
8355 IEM_MC_LOCAL(uint32_t, u32Value);
8356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8359 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8360 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8361 IEM_MC_ADVANCE_RIP();
8362 IEM_MC_END();
8363 return VINF_SUCCESS;
8364
8365 case IEMMODE_64BIT:
8366 IEM_MC_BEGIN(0, 2);
8367 IEM_MC_LOCAL(uint64_t, u64Value);
8368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8371 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8372 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8373 IEM_MC_ADVANCE_RIP();
8374 IEM_MC_END();
8375 return VINF_SUCCESS;
8376
8377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8378 }
8379 }
8380}
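
/*
 * MOVSX sign-extends where the MOVZX forms (0x0f 0xb6/0xb7) zero-extend;
 * in C terms (illustrative):
 *
 * @code
 *      uint8_t  b   = 0xf0;
 *      uint32_t uZx = (uint32_t)b;             // movzx: 0x000000f0
 *      uint32_t uSx = (uint32_t)(int8_t)b;     // movsx: 0xfffffff0
 * @endcode
 */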
8381
8382
8383/** Opcode 0x0f 0xbf. */
8384FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8385{
8386 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8387 IEMOP_HLP_MIN_386();
8388
8389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8390
8391 /** @todo Not entirely sure how the operand size prefix is handled here,
8392 * assuming that it will be ignored. Would be nice to have a few
8393 * tests for this. */
8394 /*
8395 * If rm is denoting a register, no more instruction bytes.
8396 */
8397 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8398 {
8399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8400 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8401 {
8402 IEM_MC_BEGIN(0, 1);
8403 IEM_MC_LOCAL(uint32_t, u32Value);
8404 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8405 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8406 IEM_MC_ADVANCE_RIP();
8407 IEM_MC_END();
8408 }
8409 else
8410 {
8411 IEM_MC_BEGIN(0, 1);
8412 IEM_MC_LOCAL(uint64_t, u64Value);
8413 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8414 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8415 IEM_MC_ADVANCE_RIP();
8416 IEM_MC_END();
8417 }
8418 }
8419 else
8420 {
8421 /*
8422 * We're loading a register from memory.
8423 */
8424 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8425 {
8426 IEM_MC_BEGIN(0, 2);
8427 IEM_MC_LOCAL(uint32_t, u32Value);
8428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8431 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8432 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8433 IEM_MC_ADVANCE_RIP();
8434 IEM_MC_END();
8435 }
8436 else
8437 {
8438 IEM_MC_BEGIN(0, 2);
8439 IEM_MC_LOCAL(uint64_t, u64Value);
8440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8443 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8444 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8445 IEM_MC_ADVANCE_RIP();
8446 IEM_MC_END();
8447 }
8448 }
8449 return VINF_SUCCESS;
8450}
8451
8452
8453/** Opcode 0x0f 0xc0. */
8454FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8455{
8456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8457 IEMOP_HLP_MIN_486();
8458 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8459
8460 /*
8461 * If rm is denoting a register, no more instruction bytes.
8462 */
8463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8464 {
8465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8466
8467 IEM_MC_BEGIN(3, 0);
8468 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8469 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8470 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8471
8472 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8473 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8474 IEM_MC_REF_EFLAGS(pEFlags);
8475 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8476
8477 IEM_MC_ADVANCE_RIP();
8478 IEM_MC_END();
8479 }
8480 else
8481 {
8482 /*
8483 * We're accessing memory.
8484 */
8485 IEM_MC_BEGIN(3, 3);
8486 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8487 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8488 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8489 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8491
8492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8493 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8494 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8495 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8496 IEM_MC_FETCH_EFLAGS(EFlags);
8497 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8498 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8499 else
8500 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8501
8502 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8503 IEM_MC_COMMIT_EFLAGS(EFlags);
8504 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8505 IEM_MC_ADVANCE_RIP();
8506 IEM_MC_END();
8507 return VINF_SUCCESS;
8508 }
8509 return VINF_SUCCESS;
8510}
8511
8512
8513/** Opcode 0x0f 0xc1. */
8514FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8515{
8516 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8517 IEMOP_HLP_MIN_486();
8518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8519
8520 /*
8521 * If rm is denoting a register, no more instruction bytes.
8522 */
8523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8524 {
8525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8526
8527 switch (pVCpu->iem.s.enmEffOpSize)
8528 {
8529 case IEMMODE_16BIT:
8530 IEM_MC_BEGIN(3, 0);
8531 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8532 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8533 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8534
8535 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8536 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8537 IEM_MC_REF_EFLAGS(pEFlags);
8538 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8539
8540 IEM_MC_ADVANCE_RIP();
8541 IEM_MC_END();
8542 return VINF_SUCCESS;
8543
8544 case IEMMODE_32BIT:
8545 IEM_MC_BEGIN(3, 0);
8546 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8547 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8548 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8549
8550 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8551 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8552 IEM_MC_REF_EFLAGS(pEFlags);
8553 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8554
8555 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8556 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8557 IEM_MC_ADVANCE_RIP();
8558 IEM_MC_END();
8559 return VINF_SUCCESS;
8560
8561 case IEMMODE_64BIT:
8562 IEM_MC_BEGIN(3, 0);
8563 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8564 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8565 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8566
8567 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8568 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8569 IEM_MC_REF_EFLAGS(pEFlags);
8570 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8571
8572 IEM_MC_ADVANCE_RIP();
8573 IEM_MC_END();
8574 return VINF_SUCCESS;
8575
8576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8577 }
8578 }
8579 else
8580 {
8581 /*
8582 * We're accessing memory.
8583 */
8584 switch (pVCpu->iem.s.enmEffOpSize)
8585 {
8586 case IEMMODE_16BIT:
8587 IEM_MC_BEGIN(3, 3);
8588 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8589 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8590 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8591 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8593
8594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8595 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8596 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8597 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8598 IEM_MC_FETCH_EFLAGS(EFlags);
8599 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8600 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8601 else
8602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8603
8604 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8605 IEM_MC_COMMIT_EFLAGS(EFlags);
8606 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8607 IEM_MC_ADVANCE_RIP();
8608 IEM_MC_END();
8609 return VINF_SUCCESS;
8610
8611 case IEMMODE_32BIT:
8612 IEM_MC_BEGIN(3, 3);
8613 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8614 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8615 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8616 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8618
8619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8620 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8621 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8622 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8623 IEM_MC_FETCH_EFLAGS(EFlags);
8624 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8625 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8626 else
8627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8628
8629 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8630 IEM_MC_COMMIT_EFLAGS(EFlags);
8631 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8632 IEM_MC_ADVANCE_RIP();
8633 IEM_MC_END();
8634 return VINF_SUCCESS;
8635
8636 case IEMMODE_64BIT:
8637 IEM_MC_BEGIN(3, 3);
8638 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8639 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8640 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8641 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8643
8644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8645 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8646 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8647 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8648 IEM_MC_FETCH_EFLAGS(EFlags);
8649 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8651 else
8652 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8653
8654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8655 IEM_MC_COMMIT_EFLAGS(EFlags);
8656 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8657 IEM_MC_ADVANCE_RIP();
8658 IEM_MC_END();
8659 return VINF_SUCCESS;
8660
8661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8662 }
8663 }
8664}
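
/*
 * XADD exchanges and adds: the register operand receives the old
 * destination value and the destination the sum, flags as for ADD. That
 * is why the memory paths above snapshot the register into u*RegCopy and
 * store it back after the helper call. Roughly:
 *
 * @code
 *      uTmp    = *puDst;
 *      *puDst  = uTmp + *puReg;    // EFLAGS as for ADD
 *      *puReg  = uTmp;
 * @endcode
 */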
8665
8666
8667/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8668FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8669/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8670FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8671/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8672FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8673/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8674FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8675
8676
8677/** Opcode 0x0f 0xc3. */
8678FNIEMOP_DEF(iemOp_movnti_My_Gy)
8679{
8680 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8681
8682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8683
8684 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8685 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8686 {
8687 switch (pVCpu->iem.s.enmEffOpSize)
8688 {
8689 case IEMMODE_32BIT:
8690 IEM_MC_BEGIN(0, 2);
8691 IEM_MC_LOCAL(uint32_t, u32Value);
8692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8693
8694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8697 return IEMOP_RAISE_INVALID_OPCODE();
8698
8699 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8700 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8701 IEM_MC_ADVANCE_RIP();
8702 IEM_MC_END();
8703 break;
8704
8705 case IEMMODE_64BIT:
8706 IEM_MC_BEGIN(0, 2);
8707 IEM_MC_LOCAL(uint64_t, u64Value);
8708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8709
8710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8712 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8713 return IEMOP_RAISE_INVALID_OPCODE();
8714
8715 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8716 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8717 IEM_MC_ADVANCE_RIP();
8718 IEM_MC_END();
8719 break;
8720
8721 case IEMMODE_16BIT:
8722 /** @todo check this form. */
8723 return IEMOP_RAISE_INVALID_OPCODE();
8724 }
8725 }
8726 else
8727 return IEMOP_RAISE_INVALID_OPCODE();
8728 return VINF_SUCCESS;
8729}
8730/* Opcode 0x66 0x0f 0xc3 - invalid */
8731/* Opcode 0xf3 0x0f 0xc3 - invalid */
8732/* Opcode 0xf2 0x0f 0xc3 - invalid */
8733
8734/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8735FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8736/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8737FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8738/* Opcode 0xf3 0x0f 0xc4 - invalid */
8739/* Opcode 0xf2 0x0f 0xc4 - invalid */
8740
8741/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8742FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8743/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8744FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8745/* Opcode 0xf3 0x0f 0xc5 - invalid */
8746/* Opcode 0xf2 0x0f 0xc5 - invalid */
8747
8748/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8749FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8750/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8751FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8752/* Opcode 0xf3 0x0f 0xc6 - invalid */
8753/* Opcode 0xf2 0x0f 0xc6 - invalid */
8754
8755
8756/** Opcode 0x0f 0xc7 !11/1. */
8757FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8758{
8759 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8760
8761 IEM_MC_BEGIN(4, 3);
8762 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8763 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8764 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8765 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8766 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8767 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8769
8770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8771 IEMOP_HLP_DONE_DECODING();
8772 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8773
8774 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8775 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8776 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8777
8778 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8779 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8780 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8781
8782 IEM_MC_FETCH_EFLAGS(EFlags);
8783 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8784 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8785 else
8786 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8787
8788 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8789 IEM_MC_COMMIT_EFLAGS(EFlags);
8790 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8791 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8792 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8793 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8794 IEM_MC_ENDIF();
8795 IEM_MC_ADVANCE_RIP();
8796
8797 IEM_MC_END();
8798 return VINF_SUCCESS;
8799}
8800
8801
8802/** Opcode REX.W 0x0f 0xc7 !11/1. */
8803FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8804{
8805 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8806 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8807 {
8808#if 0
8809 RT_NOREF(bRm);
8810 IEMOP_BITCH_ABOUT_STUB();
8811 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8812#else
8813 IEM_MC_BEGIN(4, 3);
8814 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8815 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8816 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8817 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8818 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8819 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8821
8822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8823 IEMOP_HLP_DONE_DECODING();
8824 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8825 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8826
8827 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8828 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8829 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8830
8831 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8832 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8833 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8834
8835 IEM_MC_FETCH_EFLAGS(EFlags);
8836# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
8837# if defined(RT_ARCH_AMD64)
8838 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8839# endif
8840 {
8841 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8842 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8843 else
8844 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8845 }
8846# if defined(RT_ARCH_AMD64)
8847 else
8848# endif
8849# endif
8850# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
8851 {
8852 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
 8853 accesses that are not all atomic, which works fine in a uni-CPU guest
8854 configuration (ignoring DMA). If guest SMP is active we have no choice
8855 but to use a rendezvous callback here. Sigh. */
8856 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8857 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8858 else
8859 {
8860 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
 8861 /* Does not get here; the tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8862 }
8863 }
8864# endif
8865
8866 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8867 IEM_MC_COMMIT_EFLAGS(EFlags);
8868 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8869 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8870 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8871 IEM_MC_ENDIF();
8872 IEM_MC_ADVANCE_RIP();
8873
8874 IEM_MC_END();
8875 return VINF_SUCCESS;
8876#endif
8877 }
8878 Log(("cmpxchg16b -> #UD\n"));
8879 return IEMOP_RAISE_INVALID_OPCODE();
8880}
8881
8882FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8883{
8884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8885 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8886 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8887}
8888
8889/** Opcode 0x0f 0xc7 11/6. */
8890FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8891
8892/** Opcode 0x0f 0xc7 !11/6. */
8893#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8894FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8895{
8896 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8897 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
8898 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
8899 IEM_MC_BEGIN(2, 0);
8900 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8901 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8903 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8904 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8905 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8906 IEM_MC_END();
8907 return VINF_SUCCESS;
8908}
8909#else
8910FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8911#endif
8912
8913/** Opcode 0x66 0x0f 0xc7 !11/6. */
8914#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8915FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8916{
8917 IEMOP_MNEMONIC(vmclear, "vmclear");
8918 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
8919 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
8920 IEM_MC_BEGIN(2, 0);
8921 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8922 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8924 IEMOP_HLP_DONE_DECODING();
8925 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8926 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8927 IEM_MC_END();
8928 return VINF_SUCCESS;
8929}
8930#else
8931FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8932#endif
8933
8934/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8935#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8936FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8937{
8938 IEMOP_MNEMONIC(vmxon, "vmxon");
8939 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
8940 IEM_MC_BEGIN(2, 0);
8941 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8942 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8944 IEMOP_HLP_DONE_DECODING();
8945 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8946 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8947 IEM_MC_END();
8948 return VINF_SUCCESS;
8949}
8950#else
8951FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8952#endif
8953
8954/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8955#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8956FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8957{
8958 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8959 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
8960 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
8961 IEM_MC_BEGIN(2, 0);
8962 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8963 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8965 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8966 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8967 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8968 IEM_MC_END();
8969 return VINF_SUCCESS;
8970}
8971#else
8972FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8973#endif
8974
8975/** Opcode 0x0f 0xc7 11/7. */
8976FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8977
8978
8979/**
8980 * Group 9 jump table for register variant.
8981 */
8982IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8983{ /* pfx: none, 066h, 0f3h, 0f2h */
8984 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8985 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8986 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8987 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8988 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8989 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8990 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8991 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8992};
8993AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8994
8995
8996/**
8997 * Group 9 jump table for memory variant.
8998 */
8999IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9000{ /* pfx: none, 066h, 0f3h, 0f2h */
9001 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9002 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9003 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9004 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9005 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9006 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9007 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9008 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9009};
9010AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9011
9012
9013/** Opcode 0x0f 0xc7. */
9014FNIEMOP_DEF(iemOp_Grp9)
9015{
9016 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9018 /* register, register */
9019 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
9020 + pVCpu->iem.s.idxPrefix], bRm);
9021 /* memory, register */
9022 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
9023 + pVCpu->iem.s.idxPrefix], bRm);
9024}
9025
9026
9027/**
9028 * Common 'bswap register' helper.
9029 */
9030FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9031{
9032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9033 switch (pVCpu->iem.s.enmEffOpSize)
9034 {
9035 case IEMMODE_16BIT:
9036 IEM_MC_BEGIN(1, 0);
9037 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9038 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9039 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9040 IEM_MC_ADVANCE_RIP();
9041 IEM_MC_END();
9042 return VINF_SUCCESS;
9043
9044 case IEMMODE_32BIT:
9045 IEM_MC_BEGIN(1, 0);
9046 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9047 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9048 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9049 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9050 IEM_MC_ADVANCE_RIP();
9051 IEM_MC_END();
9052 return VINF_SUCCESS;
9053
9054 case IEMMODE_64BIT:
9055 IEM_MC_BEGIN(1, 0);
9056 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9057 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9058 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9059 IEM_MC_ADVANCE_RIP();
9060 IEM_MC_END();
9061 return VINF_SUCCESS;
9062
9063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9064 }
9065}
9066
9067
9068/** Opcode 0x0f 0xc8. */
9069FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9070{
9071 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
 9072 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
 9073 prefix; REX.B appears to be the correct prefix, however. For a parallel
 9074 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9075 IEMOP_HLP_MIN_486();
9076 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9077}
9078
9079
9080/** Opcode 0x0f 0xc9. */
9081FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9082{
9083 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9084 IEMOP_HLP_MIN_486();
9085 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9086}
9087
9088
9089/** Opcode 0x0f 0xca. */
9090FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9091{
 9092 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9093 IEMOP_HLP_MIN_486();
9094 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9095}
9096
9097
9098/** Opcode 0x0f 0xcb. */
9099FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9100{
 9101 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9102 IEMOP_HLP_MIN_486();
9103 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9104}
9105
9106
9107/** Opcode 0x0f 0xcc. */
9108FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9109{
9110 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9111 IEMOP_HLP_MIN_486();
9112 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9113}
9114
9115
9116/** Opcode 0x0f 0xcd. */
9117FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9118{
9119 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9120 IEMOP_HLP_MIN_486();
9121 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9122}
9123
9124
9125/** Opcode 0x0f 0xce. */
9126FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9127{
9128 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9129 IEMOP_HLP_MIN_486();
9130 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9131}
9132
9133
9134/** Opcode 0x0f 0xcf. */
9135FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9136{
9137 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9138 IEMOP_HLP_MIN_486();
9139 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9140}
9141
9142
9143/* Opcode 0x0f 0xd0 - invalid */
9144/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9145FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9146/* Opcode 0xf3 0x0f 0xd0 - invalid */
9147/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9148FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9149
9150/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9151FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
9152/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
9153FNIEMOP_STUB(iemOp_psrlw_Vx_W);
9154/* Opcode 0xf3 0x0f 0xd1 - invalid */
9155/* Opcode 0xf2 0x0f 0xd1 - invalid */
9156
9157/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9158FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
9159/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9160FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
9161/* Opcode 0xf3 0x0f 0xd2 - invalid */
9162/* Opcode 0xf2 0x0f 0xd2 - invalid */
9163
9164/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9165FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
9166/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9167FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
9168/* Opcode 0xf3 0x0f 0xd3 - invalid */
9169/* Opcode 0xf2 0x0f 0xd3 - invalid */
9170
9171/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9172FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
9173/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
9174FNIEMOP_STUB(iemOp_paddq_Vx_W);
9175/* Opcode 0xf3 0x0f 0xd4 - invalid */
9176/* Opcode 0xf2 0x0f 0xd4 - invalid */
9177
9178/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9179FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
9180/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9181FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
9182/* Opcode 0xf3 0x0f 0xd5 - invalid */
9183/* Opcode 0xf2 0x0f 0xd5 - invalid */
9184
9185/* Opcode 0x0f 0xd6 - invalid */
9186
9187/**
9188 * @opcode 0xd6
9189 * @oppfx 0x66
9190 * @opcpuid sse2
9191 * @opgroup og_sse2_pcksclr_datamove
9192 * @opxcpttype none
9193 * @optest op1=-1 op2=2 -> op1=2
9194 * @optest op1=0 op2=-42 -> op1=-42
9195 */
9196FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9197{
9198 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9201 {
9202 /*
9203 * Register, register.
9204 */
9205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9206 IEM_MC_BEGIN(0, 2);
9207 IEM_MC_LOCAL(uint64_t, uSrc);
9208
9209 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9210 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9211
9212 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9213 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
9214
9215 IEM_MC_ADVANCE_RIP();
9216 IEM_MC_END();
9217 }
9218 else
9219 {
9220 /*
9221 * Memory, register.
9222 */
9223 IEM_MC_BEGIN(0, 2);
9224 IEM_MC_LOCAL(uint64_t, uSrc);
9225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9226
9227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9229 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9231
9232 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9233 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9234
9235 IEM_MC_ADVANCE_RIP();
9236 IEM_MC_END();
9237 }
9238 return VINF_SUCCESS;
9239}
9240
9241
9242/**
9243 * @opcode 0xd6
9244 * @opcodesub 11 mr/reg
9245 * @oppfx f3
9246 * @opcpuid sse2
9247 * @opgroup og_sse2_simdint_datamove
9248 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9249 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9250 */
9251FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9252{
9253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9255 {
9256 /*
9257 * Register, register.
9258 */
9259 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9261 IEM_MC_BEGIN(0, 1);
9262 IEM_MC_LOCAL(uint64_t, uSrc);
9263
9264 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9266
9267 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
9268 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
9269 IEM_MC_FPU_TO_MMX_MODE();
9270
9271 IEM_MC_ADVANCE_RIP();
9272 IEM_MC_END();
9273 return VINF_SUCCESS;
9274 }
9275
9276 /**
9277 * @opdone
9278 * @opmnemonic udf30fd6mem
9279 * @opcode 0xd6
9280 * @opcodesub !11 mr/reg
9281 * @oppfx f3
9282 * @opunused intel-modrm
9283 * @opcpuid sse
9284 * @optest ->
9285 */
9286 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9287}
9288
9289
9290/**
9291 * @opcode 0xd6
9292 * @opcodesub 11 mr/reg
9293 * @oppfx f2
9294 * @opcpuid sse2
9295 * @opgroup og_sse2_simdint_datamove
9296 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9297 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9298 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9299 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9300 * @optest op1=-42 op2=0xfedcba9876543210
9301 * -> op1=0xfedcba9876543210 ftw=0xff
9302 */
9303FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9304{
9305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9306 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9307 {
9308 /*
9309 * Register, register.
9310 */
9311 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9313 IEM_MC_BEGIN(0, 1);
9314 IEM_MC_LOCAL(uint64_t, uSrc);
9315
9316 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9317 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9318
9319 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9320 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9321 IEM_MC_FPU_TO_MMX_MODE();
9322
9323 IEM_MC_ADVANCE_RIP();
9324 IEM_MC_END();
9325 return VINF_SUCCESS;
9326 }
9327
9328 /**
9329 * @opdone
9330 * @opmnemonic udf20fd6mem
9331 * @opcode 0xd6
9332 * @opcodesub !11 mr/reg
9333 * @oppfx f2
9334 * @opunused intel-modrm
9335 * @opcpuid sse
9336 * @optest ->
9337 */
9338 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9339}
9340
9341/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9342FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9343{
 9344 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
 9345 /** @todo testcase: Check that the instruction implicitly clears the high
 9346 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
 9347 * and opcode modifications are made to work with the whole width (not
 9348 * just 128). */
 9349 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
 9350 /* Docs say register only. */
9351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9353 {
9354 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9355 IEM_MC_BEGIN(2, 0);
9356 IEM_MC_ARG(uint64_t *, pDst, 0);
9357 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9358 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9359 IEM_MC_PREPARE_FPU_USAGE();
9360 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9361 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9362 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9363 IEM_MC_ADVANCE_RIP();
9364 IEM_MC_END();
9365 return VINF_SUCCESS;
9366 }
9367 return IEMOP_RAISE_INVALID_OPCODE();
9368}
9369
9370/** Opcode 0x66 0x0f 0xd7 - */
9371FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9372{
 9373 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
 9374 /** @todo testcase: Check that the instruction implicitly clears the high
 9375 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
 9376 * and opcode modifications are made to work with the whole width (not
 9377 * just 128). */
 9378 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
 9379 /* Docs say register only. */
9380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9382 {
9383 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9384 IEM_MC_BEGIN(2, 0);
9385 IEM_MC_ARG(uint64_t *, pDst, 0);
9386 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9387 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9388 IEM_MC_PREPARE_SSE_USAGE();
9389 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9390 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9391 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9392 IEM_MC_ADVANCE_RIP();
9393 IEM_MC_END();
9394 return VINF_SUCCESS;
9395 }
9396 return IEMOP_RAISE_INVALID_OPCODE();
9397}
9398
9399/* Opcode 0xf3 0x0f 0xd7 - invalid */
9400/* Opcode 0xf2 0x0f 0xd7 - invalid */
9401
9402
9403/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9404FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9405/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9406FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9407/* Opcode 0xf3 0x0f 0xd8 - invalid */
9408/* Opcode 0xf2 0x0f 0xd8 - invalid */
9409
9410/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9411FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9412/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9413FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9414/* Opcode 0xf3 0x0f 0xd9 - invalid */
9415/* Opcode 0xf2 0x0f 0xd9 - invalid */
9416
9417/** Opcode 0x0f 0xda - pminub Pq, Qq */
9418FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9419/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9420FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9421/* Opcode 0xf3 0x0f 0xda - invalid */
9422/* Opcode 0xf2 0x0f 0xda - invalid */
9423
9424/** Opcode 0x0f 0xdb - pand Pq, Qq */
9425FNIEMOP_DEF(iemOp_pand_Pq_Qq)
9426{
9427 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9428 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pand);
9429}
9430
9431
9432/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
9433FNIEMOP_DEF(iemOp_pand_Vx_Wx)
9434{
9435 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9436 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
9437}
9438
9439
9440/* Opcode 0xf3 0x0f 0xdb - invalid */
9441/* Opcode 0xf2 0x0f 0xdb - invalid */
9442
9443/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9444FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9445/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9446FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9447/* Opcode 0xf3 0x0f 0xdc - invalid */
9448/* Opcode 0xf2 0x0f 0xdc - invalid */
9449
9450/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9451FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9452/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9453FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9454/* Opcode 0xf3 0x0f 0xdd - invalid */
9455/* Opcode 0xf2 0x0f 0xdd - invalid */
9456
9457/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9458FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9459/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9460FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9461/* Opcode 0xf3 0x0f 0xde - invalid */
9462/* Opcode 0xf2 0x0f 0xde - invalid */
9463
9464
9465/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9466FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
9467{
9468 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9469 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pandn);
9470}
9471
9472
9473/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9474FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
9475{
9476 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9477 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
9478}
9479
9480
9481/* Opcode 0xf3 0x0f 0xdf - invalid */
9482/* Opcode 0xf2 0x0f 0xdf - invalid */
9483
9484/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9485FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9486/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9487FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9488/* Opcode 0xf3 0x0f 0xe0 - invalid */
9489/* Opcode 0xf2 0x0f 0xe0 - invalid */
9490
9491/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9492FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9493/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9494FNIEMOP_STUB(iemOp_psraw_Vx_W);
9495/* Opcode 0xf3 0x0f 0xe1 - invalid */
9496/* Opcode 0xf2 0x0f 0xe1 - invalid */
9497
9498/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9499FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9500/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9501FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9502/* Opcode 0xf3 0x0f 0xe2 - invalid */
9503/* Opcode 0xf2 0x0f 0xe2 - invalid */
9504
9505/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9506FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9507/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9508FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9509/* Opcode 0xf3 0x0f 0xe3 - invalid */
9510/* Opcode 0xf2 0x0f 0xe3 - invalid */
9511
9512/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9513FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9514/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9515FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9516/* Opcode 0xf3 0x0f 0xe4 - invalid */
9517/* Opcode 0xf2 0x0f 0xe4 - invalid */
9518
9519/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9520FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9521/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9522FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9523/* Opcode 0xf3 0x0f 0xe5 - invalid */
9524/* Opcode 0xf2 0x0f 0xe5 - invalid */
9525
9526/* Opcode 0x0f 0xe6 - invalid */
9527/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9528FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9529/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9530FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9531/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9532FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9533
9534
9535/**
9536 * @opcode 0xe7
9537 * @opcodesub !11 mr/reg
9538 * @oppfx none
9539 * @opcpuid sse
9540 * @opgroup og_sse1_cachect
9541 * @opxcpttype none
9542 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9543 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9544 */
9545FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9546{
9547 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9549 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9550 {
9551 /* Register, memory. */
9552 IEM_MC_BEGIN(0, 2);
9553 IEM_MC_LOCAL(uint64_t, uSrc);
9554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9555
9556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9558 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9559 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9560
9561 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9562 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9563 IEM_MC_FPU_TO_MMX_MODE();
9564
9565 IEM_MC_ADVANCE_RIP();
9566 IEM_MC_END();
9567 return VINF_SUCCESS;
9568 }
9569 /**
9570 * @opdone
9571 * @opmnemonic ud0fe7reg
9572 * @opcode 0xe7
9573 * @opcodesub 11 mr/reg
9574 * @oppfx none
9575 * @opunused immediate
9576 * @opcpuid sse
9577 * @optest ->
9578 */
9579 return IEMOP_RAISE_INVALID_OPCODE();
9580}
9581
9582/**
9583 * @opcode 0xe7
9584 * @opcodesub !11 mr/reg
9585 * @oppfx 0x66
9586 * @opcpuid sse2
9587 * @opgroup og_sse2_cachect
9588 * @opxcpttype 1
9589 * @optest op1=-1 op2=2 -> op1=2
9590 * @optest op1=0 op2=-42 -> op1=-42
9591 */
9592FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9593{
9594 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9596 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9597 {
9598 /* Register, memory. */
9599 IEM_MC_BEGIN(0, 2);
9600 IEM_MC_LOCAL(RTUINT128U, uSrc);
9601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9602
9603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9605 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9607
9608 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9609 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9610
9611 IEM_MC_ADVANCE_RIP();
9612 IEM_MC_END();
9613 return VINF_SUCCESS;
9614 }
9615
9616 /**
9617 * @opdone
9618 * @opmnemonic ud660fe7reg
9619 * @opcode 0xe7
9620 * @opcodesub 11 mr/reg
9621 * @oppfx 0x66
9622 * @opunused immediate
9623 * @opcpuid sse
9624 * @optest ->
9625 */
9626 return IEMOP_RAISE_INVALID_OPCODE();
9627}
9628
9629/* Opcode 0xf3 0x0f 0xe7 - invalid */
9630/* Opcode 0xf2 0x0f 0xe7 - invalid */
9631
9632
9633/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9634FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9635/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9636FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9637/* Opcode 0xf3 0x0f 0xe8 - invalid */
9638/* Opcode 0xf2 0x0f 0xe8 - invalid */
9639
9640/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9641FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9642/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9643FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9644/* Opcode 0xf3 0x0f 0xe9 - invalid */
9645/* Opcode 0xf2 0x0f 0xe9 - invalid */
9646
9647/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9648FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9649/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9650FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9651/* Opcode 0xf3 0x0f 0xea - invalid */
9652/* Opcode 0xf2 0x0f 0xea - invalid */
9653
9654
9655/** Opcode 0x0f 0xeb - por Pq, Qq */
9656FNIEMOP_DEF(iemOp_por_Pq_Qq)
9657{
9658 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9659 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_por);
9660}
9661
9662
9663/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9664FNIEMOP_DEF(iemOp_por_Vx_W)
9665{
9666 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9667 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
9668}
9669
9670
9671/* Opcode 0xf3 0x0f 0xeb - invalid */
9672/* Opcode 0xf2 0x0f 0xeb - invalid */
9673
9674/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9675FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9676/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9677FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9678/* Opcode 0xf3 0x0f 0xec - invalid */
9679/* Opcode 0xf2 0x0f 0xec - invalid */
9680
9681/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9682FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9683/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9684FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9685/* Opcode 0xf3 0x0f 0xed - invalid */
9686/* Opcode 0xf2 0x0f 0xed - invalid */
9687
9688/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9689FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9690/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9691FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9692/* Opcode 0xf3 0x0f 0xee - invalid */
9693/* Opcode 0xf2 0x0f 0xee - invalid */
9694
9695
9696/** Opcode 0x0f 0xef - pxor Pq, Qq */
9697FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9698{
9699 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9700 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9701}
9702
9703
9704/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9705FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9706{
9707 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9708 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9709}
9710
9711
9712/* Opcode 0xf3 0x0f 0xef - invalid */
9713/* Opcode 0xf2 0x0f 0xef - invalid */
9714
9715/* Opcode 0x0f 0xf0 - invalid */
9716/* Opcode 0x66 0x0f 0xf0 - invalid */
9717/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9718FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9719
9720/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9721FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9722/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9723FNIEMOP_STUB(iemOp_psllw_Vx_W);
9724/* Opcode 0xf2 0x0f 0xf1 - invalid */
9725
9726/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9727FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9728/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9729FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9730/* Opcode 0xf2 0x0f 0xf2 - invalid */
9731
9732/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9733FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9734/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9735FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9736/* Opcode 0xf2 0x0f 0xf3 - invalid */
9737
9738/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9739FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9740/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9741FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9742/* Opcode 0xf2 0x0f 0xf4 - invalid */
9743
9744/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9745FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9746/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9747FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9748/* Opcode 0xf2 0x0f 0xf5 - invalid */
9749
9750/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9751FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9752/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9753FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9754/* Opcode 0xf2 0x0f 0xf6 - invalid */
9755
9756/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9757FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9758/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9759FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9760/* Opcode 0xf2 0x0f 0xf7 - invalid */
9761
9762/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9763FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9764/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9765FNIEMOP_STUB(iemOp_psubb_Vx_W);
9766/* Opcode 0xf2 0x0f 0xf8 - invalid */
9767
9768/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9769FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9770/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9771FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9772/* Opcode 0xf2 0x0f 0xf9 - invalid */
9773
9774/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9775FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9776/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9777FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9778/* Opcode 0xf2 0x0f 0xfa - invalid */
9779
9780/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9781FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9782/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9783FNIEMOP_STUB(iemOp_psubq_Vx_W);
9784/* Opcode 0xf2 0x0f 0xfb - invalid */
9785
9786/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9787FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9788/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9789FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9790/* Opcode 0xf2 0x0f 0xfc - invalid */
9791
9792/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9793FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9794/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9795FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9796/* Opcode 0xf2 0x0f 0xfd - invalid */
9797
9798/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9799FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9800/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9801FNIEMOP_STUB(iemOp_paddd_Vx_W);
9802/* Opcode 0xf2 0x0f 0xfe - invalid */
9803
9804
9805/** Opcode **** 0x0f 0xff - UD0 */
9806FNIEMOP_DEF(iemOp_ud0)
9807{
9808 IEMOP_MNEMONIC(ud0, "ud0");
9809 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9810 {
9811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9812#ifndef TST_IEM_CHECK_MC
9813 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9814 {
9815 RTGCPTR GCPtrEff;
9816 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9817 if (rcStrict != VINF_SUCCESS)
9818 return rcStrict;
9819 }
9820#endif
9821 IEMOP_HLP_DONE_DECODING();
9822 }
9823 return IEMOP_RAISE_INVALID_OPCODE();
9824}
9825
9826
9827
9828/**
9829 * Two byte opcode map, first byte 0x0f.
9830 *
9831 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9832 * check if it needs updating as well when making changes.
9833 */
9834IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9835{
9836 /* no prefix, 066h prefix f3h prefix, f2h prefix */
9837 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9838 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9839 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9840 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9841 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9842 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9843 /* 0x06 */ IEMOP_X4(iemOp_clts),
9844 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9845 /* 0x08 */ IEMOP_X4(iemOp_invd),
9846 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9847 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9848 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9849 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9850 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9851 /* 0x0e */ IEMOP_X4(iemOp_femms),
9852 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9853
9854 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9855 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9856 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9857 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9858 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9859 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9860 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9861 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9862 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9863 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9864 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9865 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9866 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9867 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9868 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9869 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9870
9871 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9872 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9873 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9874 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9875 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9876 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9877 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9878 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9879 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9880 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9881 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9882 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9883 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9884 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9885 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9886 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9887
9888 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9889 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9890 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9891 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9892 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9893 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9894 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9895 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9896 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9897 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9898 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9899 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9900 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9901 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9902 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9903 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9904
9905 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9906 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9907 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9908 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9909 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9910 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9911 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9912 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9913 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9914 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9915 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9916 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9917 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9918 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9919 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9920 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9921
9922 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9923 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9924 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9925 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9926 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9927 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9928 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9929 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9930 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9931 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9932 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9933 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9934 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9935 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9936 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9937 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9938
9939 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9940 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9941 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9942 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9943 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9944 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9945 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9946 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9947 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9948 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9949 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9950 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9951 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9952 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9953 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9954 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9955
9956 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9957 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9958 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9959 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9960 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9961 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9962 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9963 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9964
9965 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9966 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9967 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9968 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9969 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9970 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9971 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9972 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9973
9974 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9975 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9976 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9977 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9978 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9979 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9980 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9981 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9982 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9983 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9984 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9985 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9986 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9987 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9988 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9989 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9990
9991 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9992 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9993 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9994 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9995 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9996 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9997 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9998 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9999 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
10000 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
10001 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
10002 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
10003 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
10004 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
10005 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
10006 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
10007
10008 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
10009 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
10010 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
10011 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
10012 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
10013 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
10014 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
10015 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
10016 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
10017 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
10018 /* 0xaa */ IEMOP_X4(iemOp_rsm),
10019 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
10020 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
10021 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
10022 /* 0xae */ IEMOP_X4(iemOp_Grp15),
10023 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
10024
10025 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
10026 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
10027 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
10028 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
10029 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
10030 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
10031 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
10032 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
10033 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
10034 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
10035 /* 0xba */ IEMOP_X4(iemOp_Grp8),
10036 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
10037 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
10038 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
10039 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
10040 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
10041
10042 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
10043 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
10044 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
10045 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10046 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10047 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10048 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10049 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
10050 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
10051 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
10052 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
10053 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
10054 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
10055 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
10056 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
10057 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
10058
10059 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
10060 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10061 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10062 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10063 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10064 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10065 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
10066 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10067 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10068 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10069 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10070 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10071 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10072 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10073 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10074 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10075
10076 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10077 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10078 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10079 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10080 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10081 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10082 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
10083 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10084 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10085 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10086 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10087 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10088 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10089 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10090 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10091 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10092
10093 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
10094 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10095 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10096 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10097 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10098 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10099 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10100 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10101 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10102 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10103 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10104 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10105 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10106 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10107 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10108 /* 0xff */ IEMOP_X4(iemOp_ud0),
10109};
10110AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
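/* Illustrative sketch of how this map is indexed: 256 opcodes times 4 prefix
   columns (none/066h/0f3h/0f2h), assuming idxPrefix is 0..3 as in the Group 9
   dispatch above. Sketch only, not built. */
#if 0
static PFNIEMOP iemSketchTwoByteLookup(uint8_t bOpcode, uint8_t idxPrefix)
{
    /* E.g. 0x0f 0xef with a 066h prefix: 0xef*4 + 1 -> iemOp_pxor_Vx_Wx. */
    return g_apfnTwoByteMap[(uint32_t)bOpcode * 4 + idxPrefix];
}
#endif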
10111
10112/** @} */
10113