/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 74015 2018-09-01 04:20:29Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for the Group 6 verr (/4) and verw (/5) instructions. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
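
/* Decoding refresher: the ModR/M byte packs mod (bits 7:6), reg (bits 5:3)
   and rm (bits 2:0), and the dispatcher above indexes g_apfnGroup6 by reg.
   E.g. the encoding 0F 00 D8 gives bRm=0xD8 (11 011 000b): mod=3 selects
   the register form, reg=3 routes to iemOp_Grp6_ltr, and rm=0 names AX. */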


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled, GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
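
/* Operand note: lgdt (and lidt below) reads a pseudo-descriptor from
   memory: a 16-bit limit followed by a 24- or 32-bit base depending on
   operand size, or a 64-bit base in long mode.  That is why the effective
   operand size is forwarded to the C implementation as an argument. */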


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled, GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
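
/* Background: the "machine status word" is the legacy 286 name for the low
   16 bits of CR0.  The register form of smsw honours the operand size
   (hence enmEffOpSize above), while the memory form always stores 16 bits;
   lmsw below can only touch the low four CR0 bits. */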


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
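
/* Dispatch note: with mod=3 the 0F 01 encodings are fixed opcode bytes
   rather than reg/rm operands; reg picks the row and rm the instruction.
   E.g. 0F 01 D9 (bRm=0xD9: reg=3, rm=1) is vmmcall and 0F 01 F8 (reg=7,
   rm=0) is swapgs, matching the 0xd0..0xdf style opcode comments above. */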

/** Common worker for the LAR (0x0f 0x02) and LSL (0x0f 0x03) instructions. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
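
/* Semantics note: on success LAR loads the (masked) access-rights bytes and
   LSL the segment limit of the descriptor named by the selector operand,
   and sets ZF; if the selector fails the type/privilege checks, ZF is
   cleared and the destination register is left unchanged. */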


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
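
/* Encoding note: 3DNow! instructions all share the 0F 0F opcode and are
   distinguished by a suffix byte that trails the ModR/M operand bytes
   (0F 0F /r ib).  The valid suffix values are sparse, hence the dispatcher
   uses a switch rather than a 256-entry table. */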


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
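
/* Alignment note: 0F 10 /r without a prefix is the unaligned 128-bit move;
   unlike movaps (0F 28) it never raises #GP for a misaligned memory
   operand, which is why the memory path above uses the plain
   IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant. */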


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
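
/* Disambiguation note: 0F 12 is two instructions sharing one opcode, told
   apart by the ModR/M mod field.  mod=3 (register source) is movhlps,
   copying the high qword of the source XMM register into the low qword of
   the destination; any other mod is movlps, loading the low qword from a
   64-bit memory operand. */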


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
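
/* Lane mapping: movsldup duplicates the even (low) singles of its source,
   dst[0]=dst[1]=src[0] and dst[2]=dst[3]=src[2]; movshdup (F3 0F 16) is
   the odd counterpart with dst[0]=dst[1]=src[1] and dst[2]=dst[3]=src[3].
   The @optest vectors above spell this out dword by dword. */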


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
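
/* Lane mapping: movddup broadcasts the low double, making dst[63:0] =
   dst[127:64] = src[63:0].  Its memory form reads only 64 bits - note the
   plain IEM_MC_FETCH_MEM_U64 and the @opxcpttype 5 (no 16-byte alignment
   check), versus type 4 for the full-width movsldup above. */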


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode      0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode      0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/*  Opcode 0xf3 0x0f 0x15 - invalid */
/*  Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opdone
 * @opmnemonic  udf20f16
 * @opcode      0x16
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */
1921
1922
1923/**
1924 * @opcode 0x17
1925 * @opcodesub !11 mr/reg
1926 * @oppfx none
1927 * @opcpuid sse
1928 * @opgroup og_sse_simdfp_datamove
1929 * @opxcpttype 5
1930 * @optest op1=1 op2=2 -> op1=2
1931 * @optest op1=0 op2=-42 -> op1=-42
1932 */
1933FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1934{
1935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1936 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1937 {
1938 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1939
1940 IEM_MC_BEGIN(0, 2);
1941 IEM_MC_LOCAL(uint64_t, uSrc);
1942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1943
1944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1947 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1948
1949 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1950 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1951
1952 IEM_MC_ADVANCE_RIP();
1953 IEM_MC_END();
1954 return VINF_SUCCESS;
1955 }
1956
1957 /**
1958 * @opdone
1959 * @opmnemonic ud0f17m3
1960 * @opcode 0x17
1961 * @opcodesub 11 mr/reg
1962 * @oppfx none
1963 * @opunused immediate
1964 * @opcpuid sse
1965 * @optest ->
1966 */
1967 return IEMOP_RAISE_INVALID_OPCODE();
1968}
1969
1970
1971/**
1972 * @opcode 0x17
1973 * @opcodesub !11 mr/reg
1974 * @oppfx 0x66
1975 * @opcpuid sse2
1976 * @opgroup og_sse2_pcksclr_datamove
1977 * @opxcpttype 5
1978 * @optest op1=1 op2=2 -> op1=2
1979 * @optest op1=0 op2=-42 -> op1=-42
1980 */
1981FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1982{
1983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1984 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1985 {
1986 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1987
1988 IEM_MC_BEGIN(0, 2);
1989 IEM_MC_LOCAL(uint64_t, uSrc);
1990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1991
1992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1995 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1996
1997 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1998 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1999
2000 IEM_MC_ADVANCE_RIP();
2001 IEM_MC_END();
2002 return VINF_SUCCESS;
2003 }
2004
2005 /**
2006 * @opdone
2007 * @opmnemonic ud660f17m3
2008 * @opcode 0x17
2009 * @opcodesub 11 mr/reg
2010 * @oppfx 0x66
2011 * @opunused immediate
2012 * @opcpuid sse
2013 * @optest ->
2014 */
2015 return IEMOP_RAISE_INVALID_OPCODE();
2016}
2017
2018
2019/**
2020 * @opdone
2021 * @opmnemonic udf30f17
2022 * @opcode 0x17
2023 * @oppfx 0xf3
2024 * @opunused intel-modrm
2025 * @opcpuid sse
2026 * @optest ->
2027 * @opdone
2028 */
2029
2030/**
2031 * @opmnemonic udf20f17
2032 * @opcode 0x17
2033 * @oppfx 0xf2
2034 * @opunused intel-modrm
2035 * @opcpuid sse
2036 * @optest ->
2037 * @opdone
2038 */
2039
2040
2041/** Opcode 0x0f 0x18. */
2042FNIEMOP_DEF(iemOp_prefetch_Grp16)
2043{
2044 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2045 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2046 {
2047 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2048 {
2049 case 4: /* Aliased to /0 for the time being according to AMD. */
2050 case 5: /* Aliased to /0 for the time being according to AMD. */
2051 case 6: /* Aliased to /0 for the time being according to AMD. */
2052 case 7: /* Aliased to /0 for the time being according to AMD. */
2053 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2054 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2055 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2056 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2058 }
2059
2060 IEM_MC_BEGIN(0, 1);
2061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2064 /* Currently a NOP. */
2065 NOREF(GCPtrEffSrc);
2066 IEM_MC_ADVANCE_RIP();
2067 IEM_MC_END();
2068 return VINF_SUCCESS;
2069 }
2070
2071 return IEMOP_RAISE_INVALID_OPCODE();
2072}
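/* Decode sketch (hypothetical bytes): 0f 18 11 has bRm=0x11, i.e. mod=0,
   reg=2, rm=1, so the switch above picks prefetcht1 with an [ecx]/[rcx]
   operand; reg values 4 thru 7 fall through to the prefetchNTA case. */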
2073
2074
2075/** Opcode 0x0f 0x19..0x1f. */
2076FNIEMOP_DEF(iemOp_nop_Ev)
2077{
2078 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2081 {
2082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2083 IEM_MC_BEGIN(0, 0);
2084 IEM_MC_ADVANCE_RIP();
2085 IEM_MC_END();
2086 }
2087 else
2088 {
2089 IEM_MC_BEGIN(0, 1);
2090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2093 /* Currently a NOP. */
2094 NOREF(GCPtrEffSrc);
2095 IEM_MC_ADVANCE_RIP();
2096 IEM_MC_END();
2097 }
2098 return VINF_SUCCESS;
2099}
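/* These reserved forms are what multi-byte NOPs decode to; e.g. the commonly
   recommended 5-byte sequence 0f 1f 44 00 00 takes the memory path above,
   where the effective address is calculated and then simply discarded. */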
2100
2101
2102/** Opcode 0x0f 0x20. */
2103FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2104{
2105 /* mod is ignored, as are operand size overrides. */
2106 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2107 IEMOP_HLP_MIN_386();
2108 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2109 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2110 else
2111 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2112
2113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2114 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2115 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2116 {
2117 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2118 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2119 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2120 iCrReg |= 8;
2121 }
2122 switch (iCrReg)
2123 {
2124 case 0: case 2: case 3: case 4: case 8:
2125 break;
2126 default:
2127 return IEMOP_RAISE_INVALID_OPCODE();
2128 }
2129 IEMOP_HLP_DONE_DECODING();
2130
2131 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2132}
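/* Decode sketch (hypothetical bytes): f0 0f 20 c0 is lock mov eax, cr0. The
   reg field of bRm=0xc0 is 0, and on guest CPUs reporting fMovCr8In32Bit the
   LOCK prefix ors in 8 above, so iCrReg becomes 8 and CR8 is read instead. */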
2133
2134
2135/** Opcode 0x0f 0x21. */
2136FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2137{
2138 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2139 IEMOP_HLP_MIN_386();
2140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2142 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2145 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2146 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2147}
2148
2149
2150/** Opcode 0x0f 0x22. */
2151FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2152{
2153 /* mod is ignored, as are operand size overrides. */
2154 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2155 IEMOP_HLP_MIN_386();
2156 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2157 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2158 else
2159 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2160
2161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2162 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2164 {
2165 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2166 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2167 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2168 iCrReg |= 8;
2169 }
2170 switch (iCrReg)
2171 {
2172 case 0: case 2: case 3: case 4: case 8:
2173 break;
2174 default:
2175 return IEMOP_RAISE_INVALID_OPCODE();
2176 }
2177 IEMOP_HLP_DONE_DECODING();
2178
2179 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2180}
2181
2182
2183/** Opcode 0x0f 0x23. */
2184FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2185{
2186 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2187 IEMOP_HLP_MIN_386();
2188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2190 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2191 return IEMOP_RAISE_INVALID_OPCODE();
2192 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2193 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2194 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2195}
2196
2197
2198/** Opcode 0x0f 0x24. */
2199FNIEMOP_DEF(iemOp_mov_Rd_Td)
2200{
2201 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2202 /** @todo works on 386 and 486. */
2203 /* The RM byte is not considered, see testcase. */
2204 return IEMOP_RAISE_INVALID_OPCODE();
2205}
2206
2207
2208/** Opcode 0x0f 0x26. */
2209FNIEMOP_DEF(iemOp_mov_Td_Rd)
2210{
2211 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2212 /** @todo works on 386 and 486. */
2213 /* The RM byte is not considered, see testcase. */
2214 return IEMOP_RAISE_INVALID_OPCODE();
2215}
2216
2217
2218/**
2219 * @opcode 0x28
2220 * @oppfx none
2221 * @opcpuid sse
2222 * @opgroup og_sse_simdfp_datamove
2223 * @opxcpttype 1
2224 * @optest op1=1 op2=2 -> op1=2
2225 * @optest op1=0 op2=-42 -> op1=-42
2226 */
2227FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2228{
2229 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2232 {
2233 /*
2234 * Register, register.
2235 */
2236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2237 IEM_MC_BEGIN(0, 0);
2238 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2239 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2240 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2241 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2242 IEM_MC_ADVANCE_RIP();
2243 IEM_MC_END();
2244 }
2245 else
2246 {
2247 /*
2248 * Register, memory.
2249 */
2250 IEM_MC_BEGIN(0, 2);
2251 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2253
2254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2258
2259 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2260 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2261
2262 IEM_MC_ADVANCE_RIP();
2263 IEM_MC_END();
2264 }
2265 return VINF_SUCCESS;
2266}
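/* The _ALIGN_SSE fetch above is what gives movaps its alignment semantics:
   an effective address that isn't 16-byte aligned raises #GP(0) instead of
   being accessed unaligned (contrast with movups and movdqu). */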
2267
2268/**
2269 * @opcode 0x28
2270 * @oppfx 66
2271 * @opcpuid sse2
2272 * @opgroup og_sse2_pcksclr_datamove
2273 * @opxcpttype 1
2274 * @optest op1=1 op2=2 -> op1=2
2275 * @optest op1=0 op2=-42 -> op1=-42
2276 */
2277FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2278{
2279 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2281 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2282 {
2283 /*
2284 * Register, register.
2285 */
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287 IEM_MC_BEGIN(0, 0);
2288 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2289 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2290 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2291 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2292 IEM_MC_ADVANCE_RIP();
2293 IEM_MC_END();
2294 }
2295 else
2296 {
2297 /*
2298 * Register, memory.
2299 */
2300 IEM_MC_BEGIN(0, 2);
2301 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2303
2304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2306 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2308
2309 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2310 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2311
2312 IEM_MC_ADVANCE_RIP();
2313 IEM_MC_END();
2314 }
2315 return VINF_SUCCESS;
2316}
2317
2318/* Opcode 0xf3 0x0f 0x28 - invalid */
2319/* Opcode 0xf2 0x0f 0x28 - invalid */
2320
2321/**
2322 * @opcode 0x29
2323 * @oppfx none
2324 * @opcpuid sse
2325 * @opgroup og_sse_simdfp_datamove
2326 * @opxcpttype 1
2327 * @optest op1=1 op2=2 -> op1=2
2328 * @optest op1=0 op2=-42 -> op1=-42
2329 */
2330FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2331{
2332 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2335 {
2336 /*
2337 * Register, register.
2338 */
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 IEM_MC_BEGIN(0, 0);
2341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2342 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2343 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2344 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2345 IEM_MC_ADVANCE_RIP();
2346 IEM_MC_END();
2347 }
2348 else
2349 {
2350 /*
2351 * Memory, register.
2352 */
2353 IEM_MC_BEGIN(0, 2);
2354 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2356
2357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2361
2362 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2363 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2364
2365 IEM_MC_ADVANCE_RIP();
2366 IEM_MC_END();
2367 }
2368 return VINF_SUCCESS;
2369}
2370
2371/**
2372 * @opcode 0x29
2373 * @oppfx 66
2374 * @opcpuid sse2
2375 * @opgroup og_sse2_pcksclr_datamove
2376 * @opxcpttype 1
2377 * @optest op1=1 op2=2 -> op1=2
2378 * @optest op1=0 op2=-42 -> op1=-42
2379 */
2380FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2381{
2382 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2385 {
2386 /*
2387 * Register, register.
2388 */
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_BEGIN(0, 0);
2391 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2392 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2393 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2394 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2395 IEM_MC_ADVANCE_RIP();
2396 IEM_MC_END();
2397 }
2398 else
2399 {
2400 /*
2401 * Memory, register.
2402 */
2403 IEM_MC_BEGIN(0, 2);
2404 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2406
2407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2409 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2410 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2411
2412 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2413 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2414
2415 IEM_MC_ADVANCE_RIP();
2416 IEM_MC_END();
2417 }
2418 return VINF_SUCCESS;
2419}
2420
2421/* Opcode 0xf3 0x0f 0x29 - invalid */
2422/* Opcode 0xf2 0x0f 0x29 - invalid */
2423
2424
2425/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2426FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2427/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2428FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2429/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2430FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2431/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2432FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2433
2434
2435/**
2436 * @opcode 0x2b
2437 * @opcodesub !11 mr/reg
2438 * @oppfx none
2439 * @opcpuid sse
2440 * @opgroup og_sse1_cachect
2441 * @opxcpttype 1
2442 * @optest op1=1 op2=2 -> op1=2
2443 * @optest op1=0 op2=-42 -> op1=-42
2444 */
2445FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2446{
2447 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2449 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2450 {
2451 /*
2452 * memory, register.
2453 */
2454 IEM_MC_BEGIN(0, 2);
2455 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2457
2458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2460 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2461 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2462
2463 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2464 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2465
2466 IEM_MC_ADVANCE_RIP();
2467 IEM_MC_END();
2468 }
2469 /* The register, register encoding is invalid. */
2470 else
2471 return IEMOP_RAISE_INVALID_OPCODE();
2472 return VINF_SUCCESS;
2473}
2474
2475/**
2476 * @opcode 0x2b
2477 * @opcodesub !11 mr/reg
2478 * @oppfx 0x66
2479 * @opcpuid sse2
2480 * @opgroup og_sse2_cachect
2481 * @opxcpttype 1
2482 * @optest op1=1 op2=2 -> op1=2
2483 * @optest op1=0 op2=-42 -> op1=-42
2484 */
2485FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2486{
2487 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2489 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2490 {
2491 /*
2492 * memory, register.
2493 */
2494 IEM_MC_BEGIN(0, 2);
2495 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2502
2503 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2504 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2505
2506 IEM_MC_ADVANCE_RIP();
2507 IEM_MC_END();
2508 }
2509 /* The register, register encoding is invalid. */
2510 else
2511 return IEMOP_RAISE_INVALID_OPCODE();
2512 return VINF_SUCCESS;
2513}
2514/* Opcode 0xf3 0x0f 0x2b - invalid */
2515/* Opcode 0xf2 0x0f 0x2b - invalid */
2516
2517
2518/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2519FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2520/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2521FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2522/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2523FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2524/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2525FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2526
2527/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2528FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2529/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2530FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2531/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2532FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2533/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2534FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2535
2536/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2537FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2538/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2539FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2540/* Opcode 0xf3 0x0f 0x2e - invalid */
2541/* Opcode 0xf2 0x0f 0x2e - invalid */
2542
2543/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2544FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2545/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2546FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2547/* Opcode 0xf3 0x0f 0x2f - invalid */
2548/* Opcode 0xf2 0x0f 0x2f - invalid */
2549
2550/** Opcode 0x0f 0x30. */
2551FNIEMOP_DEF(iemOp_wrmsr)
2552{
2553 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2556}
2557
2558
2559/** Opcode 0x0f 0x31. */
2560FNIEMOP_DEF(iemOp_rdtsc)
2561{
2562 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2565}
2566
2567
2568/** Opcode 0x0f 0x32. */
2569FNIEMOP_DEF(iemOp_rdmsr)
2570{
2571 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2574}
2575
2576
2577/** Opcode 0x0f 0x33. */
2578FNIEMOP_DEF(iemOp_rdpmc)
2579{
2580 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2582 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2583}
2584
2585
2586/** Opcode 0x0f 0x34. */
2587FNIEMOP_STUB(iemOp_sysenter);
2588/** Opcode 0x0f 0x35. */
2589FNIEMOP_STUB(iemOp_sysexit);
2590/** Opcode 0x0f 0x37. */
2591FNIEMOP_STUB(iemOp_getsec);
2592
2593
2594/** Opcode 0x0f 0x38. */
2595FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2596{
2597#ifdef IEM_WITH_THREE_0F_38
2598 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2599 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2600#else
2601 IEMOP_BITCH_ABOUT_STUB();
2602 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2603#endif
2604}
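/* Table layout sketch: each opcode byte owns four consecutive entries, one
   per mandatory prefix (assuming idxPrefix maps none/0x66/0xf3/0xf2 to 0..3),
   so e.g. 66 0f 38 00 would dispatch to g_apfnThreeByte0f38[0*4 + 1]. */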
2605
2606
2607/** Opcode 0x0f 0x3a. */
2608FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2609{
2610#ifdef IEM_WITH_THREE_0F_3A
2611 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2612 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2613#else
2614 IEMOP_BITCH_ABOUT_STUB();
2615 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2616#endif
2617}
2618
2619
2620/**
2621 * Implements a conditional move.
2622 *
2623  * Wish there were an obvious way to do this that would let us share code
2624  * and reduce bloat.
2625 *
2626 * @param a_Cnd The conditional "microcode" operation.
2627 */
2628#define CMOV_X(a_Cnd) \
2629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2631 { \
2632 switch (pVCpu->iem.s.enmEffOpSize) \
2633 { \
2634 case IEMMODE_16BIT: \
2635 IEM_MC_BEGIN(0, 1); \
2636 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2637 a_Cnd { \
2638 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2639 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2640 } IEM_MC_ENDIF(); \
2641 IEM_MC_ADVANCE_RIP(); \
2642 IEM_MC_END(); \
2643 return VINF_SUCCESS; \
2644 \
2645 case IEMMODE_32BIT: \
2646 IEM_MC_BEGIN(0, 1); \
2647 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2648 a_Cnd { \
2649 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2650 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2651 } IEM_MC_ELSE() { \
2652 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2653 } IEM_MC_ENDIF(); \
2654 IEM_MC_ADVANCE_RIP(); \
2655 IEM_MC_END(); \
2656 return VINF_SUCCESS; \
2657 \
2658 case IEMMODE_64BIT: \
2659 IEM_MC_BEGIN(0, 1); \
2660 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2661 a_Cnd { \
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2663 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2664 } IEM_MC_ENDIF(); \
2665 IEM_MC_ADVANCE_RIP(); \
2666 IEM_MC_END(); \
2667 return VINF_SUCCESS; \
2668 \
2669 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2670 } \
2671 } \
2672 else \
2673 { \
2674 switch (pVCpu->iem.s.enmEffOpSize) \
2675 { \
2676 case IEMMODE_16BIT: \
2677 IEM_MC_BEGIN(0, 2); \
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2679 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2681 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2682 a_Cnd { \
2683 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2684 } IEM_MC_ENDIF(); \
2685 IEM_MC_ADVANCE_RIP(); \
2686 IEM_MC_END(); \
2687 return VINF_SUCCESS; \
2688 \
2689 case IEMMODE_32BIT: \
2690 IEM_MC_BEGIN(0, 2); \
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2692 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2694 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2695 a_Cnd { \
2696 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2697 } IEM_MC_ELSE() { \
2698 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2699 } IEM_MC_ENDIF(); \
2700 IEM_MC_ADVANCE_RIP(); \
2701 IEM_MC_END(); \
2702 return VINF_SUCCESS; \
2703 \
2704 case IEMMODE_64BIT: \
2705 IEM_MC_BEGIN(0, 2); \
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2707 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2709 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2710 a_Cnd { \
2711 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2718 } \
2719 } do {} while (0)
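/* A minimal sketch of what CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)) expands
 * to for the 32-bit register form below (iRegDst/iRegSrc stand in for the
 * decoded ModRM fields); note that the else branch still clears the high half
 * of the destination, as a 32-bit cmov zero-extends even when the condition
 * is false:
 *
 *      IEM_MC_BEGIN(0, 1);
 *      IEM_MC_LOCAL(uint32_t, u32Tmp);
 *      IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
 *          IEM_MC_FETCH_GREG_U32(u32Tmp, iRegSrc);
 *          IEM_MC_STORE_GREG_U32(iRegDst, u32Tmp);
 *      } IEM_MC_ELSE() {
 *          IEM_MC_CLEAR_HIGH_GREG_U64(iRegDst);
 *      } IEM_MC_ENDIF();
 *      IEM_MC_ADVANCE_RIP();
 *      IEM_MC_END();
 */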
2720
2721
2722
2723/** Opcode 0x0f 0x40. */
2724FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2725{
2726 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2727 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2728}
2729
2730
2731/** Opcode 0x0f 0x41. */
2732FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2733{
2734 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2735 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2736}
2737
2738
2739/** Opcode 0x0f 0x42. */
2740FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2741{
2742 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2743 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2744}
2745
2746
2747/** Opcode 0x0f 0x43. */
2748FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2749{
2750 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2751 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2752}
2753
2754
2755/** Opcode 0x0f 0x44. */
2756FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2757{
2758 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2759 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2760}
2761
2762
2763/** Opcode 0x0f 0x45. */
2764FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2765{
2766 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2767 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2768}
2769
2770
2771/** Opcode 0x0f 0x46. */
2772FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2773{
2774 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2775 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2776}
2777
2778
2779/** Opcode 0x0f 0x47. */
2780FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2781{
2782 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2783 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2784}
2785
2786
2787/** Opcode 0x0f 0x48. */
2788FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2789{
2790 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2791 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2792}
2793
2794
2795/** Opcode 0x0f 0x49. */
2796FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2800}
2801
2802
2803/** Opcode 0x0f 0x4a. */
2804FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2808}
2809
2810
2811/** Opcode 0x0f 0x4b. */
2812FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2816}
2817
2818
2819/** Opcode 0x0f 0x4c. */
2820FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2824}
2825
2826
2827/** Opcode 0x0f 0x4d. */
2828FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2832}
2833
2834
2835/** Opcode 0x0f 0x4e. */
2836FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2840}
2841
2842
2843/** Opcode 0x0f 0x4f. */
2844FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2848}
2849
2850#undef CMOV_X
2851
2852/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2853FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2854/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2855FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2856/* Opcode 0xf3 0x0f 0x50 - invalid */
2857/* Opcode 0xf2 0x0f 0x50 - invalid */
2858
2859/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2860FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2861/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2862FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2863/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2864FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2865/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2866FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2867
2868/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2869FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2870/* Opcode 0x66 0x0f 0x52 - invalid */
2871/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2872FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2873/* Opcode 0xf2 0x0f 0x52 - invalid */
2874
2875/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2876FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2877/* Opcode 0x66 0x0f 0x53 - invalid */
2878/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2879FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2880/* Opcode 0xf2 0x0f 0x53 - invalid */
2881
2882/** Opcode 0x0f 0x54 - andps Vps, Wps */
2883FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2884/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2885FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2886/* Opcode 0xf3 0x0f 0x54 - invalid */
2887/* Opcode 0xf2 0x0f 0x54 - invalid */
2888
2889/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2890FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2891/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2892FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2893/* Opcode 0xf3 0x0f 0x55 - invalid */
2894/* Opcode 0xf2 0x0f 0x55 - invalid */
2895
2896/** Opcode 0x0f 0x56 - orps Vps, Wps */
2897FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2898/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2899FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2900/* Opcode 0xf3 0x0f 0x56 - invalid */
2901/* Opcode 0xf2 0x0f 0x56 - invalid */
2902
2903/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2904FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2905/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2906FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2907/* Opcode 0xf3 0x0f 0x57 - invalid */
2908/* Opcode 0xf2 0x0f 0x57 - invalid */
2909
2910/** Opcode 0x0f 0x58 - addps Vps, Wps */
2911FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2912/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2913FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2914/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2915FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2916/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2917FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2918
2919/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2920FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2921/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2922FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2923/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2924FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2925/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2926FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2927
2928/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2929FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2930/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2931FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2932/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2933FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2934/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2935FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2936
2937/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2938FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2939/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2940FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2941/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2942FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2943/* Opcode 0xf2 0x0f 0x5b - invalid */
2944
2945/** Opcode 0x0f 0x5c - subps Vps, Wps */
2946FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2947/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2948FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2949/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2950FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2951/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2952FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2953
2954/** Opcode 0x0f 0x5d - minps Vps, Wps */
2955FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2958/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2959FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2960/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2961FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2962
2963/** Opcode 0x0f 0x5e - divps Vps, Wps */
2964FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2965/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2966FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2967/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2968FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2969/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2970FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2971
2972/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2973FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2974/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2975FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2976/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2977FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2978/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2979FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2980
2981/**
2982  * Common worker for SSE2 instructions on the forms:
2983  *   pxxxx xmm1, xmm2/mem128
2984  *
2985  * The 2nd operand is the first half of a register, which in the memory case
2986  * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit access
2987  * for SSE.
2988 *
2989 * Exceptions type 4.
2990 */
2991 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2992{
2993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2994 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2995 {
2996 /*
2997 * Register, register.
2998 */
2999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3000 IEM_MC_BEGIN(2, 0);
3001 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3002 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3003 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3004 IEM_MC_PREPARE_SSE_USAGE();
3005 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3006 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3007 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3008 IEM_MC_ADVANCE_RIP();
3009 IEM_MC_END();
3010 }
3011 else
3012 {
3013 /*
3014 * Register, memory.
3015 */
3016 IEM_MC_BEGIN(2, 2);
3017 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3018 IEM_MC_LOCAL(uint64_t, uSrc);
3019 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3021
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3024 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3025 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3026
3027 IEM_MC_PREPARE_SSE_USAGE();
3028 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3029 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3030
3031 IEM_MC_ADVANCE_RIP();
3032 IEM_MC_END();
3033 }
3034 return VINF_SUCCESS;
3035}
3036
3037
3038/**
3039  * Common worker for MMX instructions on the forms:
3040  *   pxxxx mm1, mm2/mem32
3041  *
3042  * The 2nd operand is the first half of a register, which in the memory case
3043  * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit access
3044  * for SSE.
3045 *
3046 * Exceptions type 4.
3047 */
3048 FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3049{
3050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3051 if (!pImpl->pfnU64)
3052 return IEMOP_RAISE_INVALID_OPCODE();
3053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3054 {
3055 /*
3056 * Register, register.
3057 */
3058 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3059 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_BEGIN(2, 0);
3062 IEM_MC_ARG(uint64_t *, pDst, 0);
3063 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3064 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3065 IEM_MC_PREPARE_FPU_USAGE();
3066 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3067 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3068 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3069 IEM_MC_ADVANCE_RIP();
3070 IEM_MC_END();
3071 }
3072 else
3073 {
3074 /*
3075 * Register, memory.
3076 */
3077 IEM_MC_BEGIN(2, 2);
3078 IEM_MC_ARG(uint64_t *, pDst, 0);
3079 IEM_MC_LOCAL(uint32_t, uSrc);
3080 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3082
3083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3086 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3087
3088 IEM_MC_PREPARE_FPU_USAGE();
3089 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3090 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3091
3092 IEM_MC_ADVANCE_RIP();
3093 IEM_MC_END();
3094 }
3095 return VINF_SUCCESS;
3096}
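/* Worked example for the MMX path (assumed input values): punpcklbw mm1, mm2
   with mm1=0x1122334455667788 and mm2=0xaabbccddeeff0011 interleaves the two
   low dwords byte by byte, starting with the destination's least significant
   byte, so mm1 becomes 0xee55ff6600771188. */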
3097
3098
3099/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3100FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3101{
3102 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3103 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3104}
3105
3106/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3107FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3108{
3109 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3110 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3111}
3112
3113/* Opcode 0xf3 0x0f 0x60 - invalid */
3114
3115
3116/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3117FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3118{
3119 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3120 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3121}
3122
3123/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3124FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3125{
3126 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3127 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3128}
3129
3130/* Opcode 0xf3 0x0f 0x61 - invalid */
3131
3132
3133/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3134FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3135{
3136 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3137 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3138}
3139
3140/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3141FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3142{
3143 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3144 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3145}
3146
3147/* Opcode 0xf3 0x0f 0x62 - invalid */
3148
3149
3150
3151/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3152FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3153/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3154FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3155/* Opcode 0xf3 0x0f 0x63 - invalid */
3156
3157/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3158FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3159/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3160FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3161/* Opcode 0xf3 0x0f 0x64 - invalid */
3162
3163/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3164FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3165/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3166FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3167/* Opcode 0xf3 0x0f 0x65 - invalid */
3168
3169/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3170FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3171/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3172FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3173/* Opcode 0xf3 0x0f 0x66 - invalid */
3174
3175/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3176FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3177/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3178FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3179/* Opcode 0xf3 0x0f 0x67 - invalid */
3180
3181
3182/**
3183 * Common worker for MMX instructions on the form:
3184 * pxxxx mm1, mm2/mem64
3185 *
3186 * The 2nd operand is the second half of a register, which in the memory case
3187 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3188 * where it may read the full 128 bits or only the upper 64 bits.
3189 *
3190 * Exceptions type 4.
3191 */
3192FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3193{
3194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3195 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3197 {
3198 /*
3199 * Register, register.
3200 */
3201 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3202 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_BEGIN(2, 0);
3205 IEM_MC_ARG(uint64_t *, pDst, 0);
3206 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3207 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3208 IEM_MC_PREPARE_FPU_USAGE();
3209 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3210 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3211 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3212 IEM_MC_ADVANCE_RIP();
3213 IEM_MC_END();
3214 }
3215 else
3216 {
3217 /*
3218 * Register, memory.
3219 */
3220 IEM_MC_BEGIN(2, 2);
3221 IEM_MC_ARG(uint64_t *, pDst, 0);
3222 IEM_MC_LOCAL(uint64_t, uSrc);
3223 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3225
3226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3229 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230
3231 IEM_MC_PREPARE_FPU_USAGE();
3232 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3233 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3234
3235 IEM_MC_ADVANCE_RIP();
3236 IEM_MC_END();
3237 }
3238 return VINF_SUCCESS;
3239}
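/* Worked example (assumed input values): punpckhbw mm1, mm2 with
   mm1=0x1122334455667788 and mm2=0xaabbccddeeff0011 interleaves the two high
   dwords the same way, so mm1 becomes 0xaa11bb22cc33dd44. */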
3240
3241
3242/**
3243 * Common worker for SSE2 instructions on the form:
3244 * pxxxx xmm1, xmm2/mem128
3245 *
3246 * The 2nd operand is the second half of a register, which in the memory case
3247 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3248 * where it may read the full 128 bits or only the upper 64 bits.
3249 *
3250 * Exceptions type 4.
3251 */
3252FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3253{
3254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3256 {
3257 /*
3258 * Register, register.
3259 */
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_BEGIN(2, 0);
3262 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3263 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3264 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3265 IEM_MC_PREPARE_SSE_USAGE();
3266 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3267 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3268 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3269 IEM_MC_ADVANCE_RIP();
3270 IEM_MC_END();
3271 }
3272 else
3273 {
3274 /*
3275 * Register, memory.
3276 */
3277 IEM_MC_BEGIN(2, 2);
3278 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3279 IEM_MC_LOCAL(RTUINT128U, uSrc);
3280 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3282
3283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3285 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3286 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3287
3288 IEM_MC_PREPARE_SSE_USAGE();
3289 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3290 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3291
3292 IEM_MC_ADVANCE_RIP();
3293 IEM_MC_END();
3294 }
3295 return VINF_SUCCESS;
3296}
3297
3298
3299/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3300FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3301{
3302 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3303 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3304}
3305
3306/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3307FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3308{
3309 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3310 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3311}
3312/* Opcode 0xf3 0x0f 0x68 - invalid */
3313
3314
3315/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3316FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3317{
3318 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3319 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3320}
3321
3322/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3323FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3324{
3325 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3326 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3328}
3329/* Opcode 0xf3 0x0f 0x69 - invalid */
3330
3331
3332/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3333FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3334{
3335 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3336 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3337}
3338
3339/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3340FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3341{
3342 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3343 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3344}
3345/* Opcode 0xf3 0x0f 0x6a - invalid */
3346
3347
3348/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3349FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3350/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3351FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3352/* Opcode 0xf3 0x0f 0x6b - invalid */
3353
3354
3355/* Opcode 0x0f 0x6c - invalid */
3356
3357/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3358FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3359{
3360 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3361 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3362}
3363
3364/* Opcode 0xf3 0x0f 0x6c - invalid */
3365/* Opcode 0xf2 0x0f 0x6c - invalid */
3366
3367
3368/* Opcode 0x0f 0x6d - invalid */
3369
3370/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3371FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3372{
3373 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3374 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3375}
3376
3377/* Opcode 0xf3 0x0f 0x6d - invalid */
3378
3379
3380FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3381{
3382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3383 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3384 {
3385 /**
3386 * @opcode 0x6e
3387 * @opcodesub rex.w=1
3388 * @oppfx none
3389 * @opcpuid mmx
3390 * @opgroup og_mmx_datamove
3391 * @opxcpttype 5
3392 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3393 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3394 */
3395 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3397 {
3398 /* MMX, greg64 */
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 IEM_MC_BEGIN(0, 1);
3401 IEM_MC_LOCAL(uint64_t, u64Tmp);
3402
3403 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3404 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3405
3406 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3407 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3408 IEM_MC_FPU_TO_MMX_MODE();
3409
3410 IEM_MC_ADVANCE_RIP();
3411 IEM_MC_END();
3412 }
3413 else
3414 {
3415 /* MMX, [mem64] */
3416 IEM_MC_BEGIN(0, 2);
3417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3418 IEM_MC_LOCAL(uint64_t, u64Tmp);
3419
3420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3422 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3423 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3424
3425 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3426 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3427 IEM_MC_FPU_TO_MMX_MODE();
3428
3429 IEM_MC_ADVANCE_RIP();
3430 IEM_MC_END();
3431 }
3432 }
3433 else
3434 {
3435 /**
3436 * @opdone
3437 * @opcode 0x6e
3438 * @opcodesub rex.w=0
3439 * @oppfx none
3440 * @opcpuid mmx
3441 * @opgroup og_mmx_datamove
3442 * @opxcpttype 5
3443 * @opfunction iemOp_movd_q_Pd_Ey
3444 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3445 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3446 */
3447 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3448 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3449 {
3450 /* MMX, greg */
3451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3452 IEM_MC_BEGIN(0, 1);
3453 IEM_MC_LOCAL(uint64_t, u64Tmp);
3454
3455 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3456 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3457
3458 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3459 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3460 IEM_MC_FPU_TO_MMX_MODE();
3461
3462 IEM_MC_ADVANCE_RIP();
3463 IEM_MC_END();
3464 }
3465 else
3466 {
3467 /* MMX, [mem] */
3468 IEM_MC_BEGIN(0, 2);
3469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3470 IEM_MC_LOCAL(uint32_t, u32Tmp);
3471
3472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3474 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3475 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3476
3477 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3478 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3479 IEM_MC_FPU_TO_MMX_MODE();
3480
3481 IEM_MC_ADVANCE_RIP();
3482 IEM_MC_END();
3483 }
3484 }
3485 return VINF_SUCCESS;
3486}
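/* Encoding sketch (hypothetical bytes): 0f 6e c8 decodes to movd mm1, eax and
   takes the rex.w=0 path above (32-bit source, zero-extended to 64 bits),
   while 48 0f 6e c8 sets REX.W and decodes to movq mm1, rax. */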
3487
3488FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3489{
3490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3491 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3492 {
3493 /**
3494 * @opcode 0x6e
3495 * @opcodesub rex.w=1
3496 * @oppfx 0x66
3497 * @opcpuid sse2
3498 * @opgroup og_sse2_simdint_datamove
3499 * @opxcpttype 5
3500 * @optest 64-bit / op1=1 op2=2 -> op1=2
3501 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3502 */
3503 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3505 {
3506 /* XMM, greg64 */
3507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3508 IEM_MC_BEGIN(0, 1);
3509 IEM_MC_LOCAL(uint64_t, u64Tmp);
3510
3511 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3512 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3513
3514 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3515 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3516
3517 IEM_MC_ADVANCE_RIP();
3518 IEM_MC_END();
3519 }
3520 else
3521 {
3522 /* XMM, [mem64] */
3523 IEM_MC_BEGIN(0, 2);
3524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3525 IEM_MC_LOCAL(uint64_t, u64Tmp);
3526
3527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3531
3532 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3533 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3534
3535 IEM_MC_ADVANCE_RIP();
3536 IEM_MC_END();
3537 }
3538 }
3539 else
3540 {
3541 /**
3542 * @opdone
3543 * @opcode 0x6e
3544 * @opcodesub rex.w=0
3545 * @oppfx 0x66
3546 * @opcpuid sse2
3547 * @opgroup og_sse2_simdint_datamove
3548 * @opxcpttype 5
3549 * @opfunction iemOp_movd_q_Vy_Ey
3550 * @optest op1=1 op2=2 -> op1=2
3551 * @optest op1=0 op2=-42 -> op1=-42
3552 */
3553 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3555 {
3556 /* XMM, greg32 */
3557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3558 IEM_MC_BEGIN(0, 1);
3559 IEM_MC_LOCAL(uint32_t, u32Tmp);
3560
3561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3562 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3563
3564 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3565 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3566
3567 IEM_MC_ADVANCE_RIP();
3568 IEM_MC_END();
3569 }
3570 else
3571 {
3572 /* XMM, [mem32] */
3573 IEM_MC_BEGIN(0, 2);
3574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3575 IEM_MC_LOCAL(uint32_t, u32Tmp);
3576
3577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3579 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3580 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3581
3582 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3583 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3584
3585 IEM_MC_ADVANCE_RIP();
3586 IEM_MC_END();
3587 }
3588 }
3589 return VINF_SUCCESS;
3590}
3591
3592/* Opcode 0xf3 0x0f 0x6e - invalid */
3593
3594
3595/**
3596 * @opcode 0x6f
3597 * @oppfx none
3598 * @opcpuid mmx
3599 * @opgroup og_mmx_datamove
3600 * @opxcpttype 5
3601 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3602 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3603 */
3604FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3605{
3606 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3608 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3609 {
3610 /*
3611 * Register, register.
3612 */
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614 IEM_MC_BEGIN(0, 1);
3615 IEM_MC_LOCAL(uint64_t, u64Tmp);
3616
3617 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3618 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3619
3620 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3621 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3622 IEM_MC_FPU_TO_MMX_MODE();
3623
3624 IEM_MC_ADVANCE_RIP();
3625 IEM_MC_END();
3626 }
3627 else
3628 {
3629 /*
3630 * Register, memory.
3631 */
3632 IEM_MC_BEGIN(0, 2);
3633 IEM_MC_LOCAL(uint64_t, u64Tmp);
3634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3635
3636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3639 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3640
3641 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3642 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3643 IEM_MC_FPU_TO_MMX_MODE();
3644
3645 IEM_MC_ADVANCE_RIP();
3646 IEM_MC_END();
3647 }
3648 return VINF_SUCCESS;
3649}
3650
3651/**
3652 * @opcode 0x6f
3653 * @oppfx 0x66
3654 * @opcpuid sse2
3655 * @opgroup og_sse2_simdint_datamove
3656 * @opxcpttype 1
3657 * @optest op1=1 op2=2 -> op1=2
3658 * @optest op1=0 op2=-42 -> op1=-42
3659 */
3660FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3661{
3662 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3665 {
3666 /*
3667 * Register, register.
3668 */
3669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3670 IEM_MC_BEGIN(0, 0);
3671
3672 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3673 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3674
3675 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3676 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3677 IEM_MC_ADVANCE_RIP();
3678 IEM_MC_END();
3679 }
3680 else
3681 {
3682 /*
3683 * Register, memory.
3684 */
3685 IEM_MC_BEGIN(0, 2);
3686 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3688
3689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3692 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3693
3694 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3695 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 return VINF_SUCCESS;
3701}
3702
3703/**
3704 * @opcode 0x6f
3705 * @oppfx 0xf3
3706 * @opcpuid sse2
3707 * @opgroup og_sse2_simdint_datamove
3708 * @opxcpttype 4UA
3709 * @optest op1=1 op2=2 -> op1=2
3710 * @optest op1=0 op2=-42 -> op1=-42
3711 */
3712FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3713{
3714 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3717 {
3718 /*
3719 * Register, register.
3720 */
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722 IEM_MC_BEGIN(0, 0);
3723 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3724 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3725 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3726 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3727 IEM_MC_ADVANCE_RIP();
3728 IEM_MC_END();
3729 }
3730 else
3731 {
3732 /*
3733 * Register, memory.
3734 */
3735 IEM_MC_BEGIN(0, 2);
3736 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3738
3739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3742 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3743 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3744 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3745
3746 IEM_MC_ADVANCE_RIP();
3747 IEM_MC_END();
3748 }
3749 return VINF_SUCCESS;
3750}
3751
3752
3753/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3754FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3755{
3756    IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
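    /* The Ib immediate is a packed selector: bit pairs 1:0, 3:2, 5:4 and 7:6
       pick which source word ends up in destination words 0 thru 3. */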
3757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3759 {
3760 /*
3761 * Register, register.
3762 */
3763 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3765
3766 IEM_MC_BEGIN(3, 0);
3767 IEM_MC_ARG(uint64_t *, pDst, 0);
3768 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3769 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3770 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3771 IEM_MC_PREPARE_FPU_USAGE();
3772 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3773 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3774 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3775 IEM_MC_ADVANCE_RIP();
3776 IEM_MC_END();
3777 }
3778 else
3779 {
3780 /*
3781 * Register, memory.
3782 */
3783 IEM_MC_BEGIN(3, 2);
3784 IEM_MC_ARG(uint64_t *, pDst, 0);
3785 IEM_MC_LOCAL(uint64_t, uSrc);
3786 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3788
3789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3790 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3791 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3793 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3794
3795 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3796 IEM_MC_PREPARE_FPU_USAGE();
3797 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3798 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3799
3800 IEM_MC_ADVANCE_RIP();
3801 IEM_MC_END();
3802 }
3803 return VINF_SUCCESS;
3804}
3805
3806/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3807FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3808{
3809 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3811 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3812 {
3813 /*
3814 * Register, register.
3815 */
3816 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818
3819 IEM_MC_BEGIN(3, 0);
3820 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3821 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3822 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3824 IEM_MC_PREPARE_SSE_USAGE();
3825 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3826 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3827 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3828 IEM_MC_ADVANCE_RIP();
3829 IEM_MC_END();
3830 }
3831 else
3832 {
3833 /*
3834 * Register, memory.
3835 */
3836 IEM_MC_BEGIN(3, 2);
3837 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3838 IEM_MC_LOCAL(RTUINT128U, uSrc);
3839 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3841
3842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3843 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3844 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3846 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3847
3848 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3849 IEM_MC_PREPARE_SSE_USAGE();
3850 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3852
3853 IEM_MC_ADVANCE_RIP();
3854 IEM_MC_END();
3855 }
3856 return VINF_SUCCESS;
3857}
3858
3859/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3860FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3861{
3862 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3865 {
3866 /*
3867 * Register, register.
3868 */
3869 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3871
3872 IEM_MC_BEGIN(3, 0);
3873 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3874 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3875 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3876 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3877 IEM_MC_PREPARE_SSE_USAGE();
3878 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3879 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3880 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3881 IEM_MC_ADVANCE_RIP();
3882 IEM_MC_END();
3883 }
3884 else
3885 {
3886 /*
3887 * Register, memory.
3888 */
3889 IEM_MC_BEGIN(3, 2);
3890 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3891 IEM_MC_LOCAL(RTUINT128U, uSrc);
3892 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3894
3895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3896 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3897 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3900
3901 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3902 IEM_MC_PREPARE_SSE_USAGE();
3903 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3904 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3905
3906 IEM_MC_ADVANCE_RIP();
3907 IEM_MC_END();
3908 }
3909 return VINF_SUCCESS;
3910}
3911
3912/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3913FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3914{
3915 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3918 {
3919 /*
3920 * Register, register.
3921 */
3922 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3924
3925 IEM_MC_BEGIN(3, 0);
3926 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3927 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3928 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3929 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3930 IEM_MC_PREPARE_SSE_USAGE();
3931 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3932 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3933 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3934 IEM_MC_ADVANCE_RIP();
3935 IEM_MC_END();
3936 }
3937 else
3938 {
3939 /*
3940 * Register, memory.
3941 */
3942 IEM_MC_BEGIN(3, 2);
3943 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3944 IEM_MC_LOCAL(RTUINT128U, uSrc);
3945 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3947
3948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3949 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3950 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3952 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3953
3954 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3955 IEM_MC_PREPARE_SSE_USAGE();
3956 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3957 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3958
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 return VINF_SUCCESS;
3963}
3964
3965
3966/** Opcode 0x0f 0x71 11/2. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3968
3969/** Opcode 0x66 0x0f 0x71 11/2. */
3970FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3971
3972/** Opcode 0x0f 0x71 11/4. */
3973FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3974
3975/** Opcode 0x66 0x0f 0x71 11/4. */
3976FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3977
3978/** Opcode 0x0f 0x71 11/6. */
3979FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3980
3981/** Opcode 0x66 0x0f 0x71 11/6. */
3982FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3983
3984
3985/**
3986 * Group 12 jump table for register variant.
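 *
 * Indexed by the ModR/M reg field (0..7) times four plus idxPrefix; the four
 * entries per row are the no-prefix, 0x66, 0xf3 and 0xf2 variants, in that
 * order.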
3987 */
3988IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3989{
3990 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3991 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3992 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3993 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3994 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3995 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3996 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3997 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3998};
3999AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4000
4001
4002/** Opcode 0x0f 0x71. */
4003FNIEMOP_DEF(iemOp_Grp12)
4004{
4005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4006 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4007 /* register, register */
4008 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4009 + pVCpu->iem.s.idxPrefix], bRm);
4010 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4011}
4012
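/* Example: 66 0f 71 d0 04 decodes as psrlw xmm0, 4 -- mod=3, reg=/2, rm=0 and
   idxPrefix=1 (0x66), so the table above selects iemOp_Grp12_psrlw_Ux_Ib. */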
4013
4014/** Opcode 0x0f 0x72 11/2. */
4015FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x66 0x0f 0x72 11/2. */
4018FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x0f 0x72 11/4. */
4021FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4022
4023/** Opcode 0x66 0x0f 0x72 11/4. */
4024FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4025
4026/** Opcode 0x0f 0x72 11/6. */
4027FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4028
4029/** Opcode 0x66 0x0f 0x72 11/6. */
4030FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4031
4032
4033/**
4034 * Group 13 jump table for register variant.
4035 */
4036IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4037{
4038 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4039 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4040 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4041 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4042 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4043 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4044 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4045 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4046};
4047AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4048
4049/** Opcode 0x0f 0x72. */
4050FNIEMOP_DEF(iemOp_Grp13)
4051{
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4054 /* register, register */
4055 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4056 + pVCpu->iem.s.idxPrefix], bRm);
4057 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4058}
4059
4060
4061/** Opcode 0x0f 0x73 11/2. */
4062FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4063
4064/** Opcode 0x66 0x0f 0x73 11/2. */
4065FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x73 11/3. */
4068FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4069
4070/** Opcode 0x0f 0x73 11/6. */
4071FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4072
4073/** Opcode 0x66 0x0f 0x73 11/6. */
4074FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4075
4076/** Opcode 0x66 0x0f 0x73 11/7. */
4077FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4078
4079/**
4080 * Group 14 jump table for register variant.
4081 */
4082IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4083{
4084 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4085 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4086 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4087 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4088 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4089 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4090 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4091 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4092};
4093AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4094
4095
4096/** Opcode 0x0f 0x73. */
4097FNIEMOP_DEF(iemOp_Grp14)
4098{
4099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 /* register, register */
4102 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4103 + pVCpu->iem.s.idxPrefix], bRm);
4104 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4105}
4106
4107
4108/**
4109 * Common worker for MMX instructions on the form:
4110 * pxxx mm1, mm2/mem64
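 *
 * The IEMOPMEDIAF2 argument supplies the 64-bit worker via its pfnU64 member
 * (e.g. g_iemAImpl_pcmpeqb for the pcmpeqb wrapper further down).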
4111 */
4112FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4113{
4114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4115 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4116 {
4117 /*
4118 * Register, register.
4119 */
4120 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4121 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_BEGIN(2, 0);
4124 IEM_MC_ARG(uint64_t *, pDst, 0);
4125 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4126 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4127 IEM_MC_PREPARE_FPU_USAGE();
4128 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4129 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4130 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4131 IEM_MC_ADVANCE_RIP();
4132 IEM_MC_END();
4133 }
4134 else
4135 {
4136 /*
4137 * Register, memory.
4138 */
4139 IEM_MC_BEGIN(2, 2);
4140 IEM_MC_ARG(uint64_t *, pDst, 0);
4141 IEM_MC_LOCAL(uint64_t, uSrc);
4142 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4144
4145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4147 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4148 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4149
4150 IEM_MC_PREPARE_FPU_USAGE();
4151 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4152 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4153
4154 IEM_MC_ADVANCE_RIP();
4155 IEM_MC_END();
4156 }
4157 return VINF_SUCCESS;
4158}
4159
4160
4161/**
4162 * Common worker for SSE2 instructions on the forms:
4163 * pxxx xmm1, xmm2/mem128
4164 *
4165 * Proper alignment of the 128-bit operand is enforced.
4166 * Exceptions type 4. SSE2 cpuid checks.
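 * Misaligned memory operands fault, which is why the memory path below uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE rather than the unaligned fetch.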
4167 */
4168FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4169{
4170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4171 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4172 {
4173 /*
4174 * Register, register.
4175 */
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4177 IEM_MC_BEGIN(2, 0);
4178 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4179 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4180 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4181 IEM_MC_PREPARE_SSE_USAGE();
4182 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4183 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4184 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4185 IEM_MC_ADVANCE_RIP();
4186 IEM_MC_END();
4187 }
4188 else
4189 {
4190 /*
4191 * Register, memory.
4192 */
4193 IEM_MC_BEGIN(2, 2);
4194 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4195 IEM_MC_LOCAL(RTUINT128U, uSrc);
4196 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4198
4199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4201 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4202 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4203
4204 IEM_MC_PREPARE_SSE_USAGE();
4205 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4206 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4207
4208 IEM_MC_ADVANCE_RIP();
4209 IEM_MC_END();
4210 }
4211 return VINF_SUCCESS;
4212}
4213
4214
4215/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4216FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4217{
4218 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4219 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4220}
4221
4222/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4223FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4224{
4225    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4226 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4227}
4228
4229/* Opcode 0xf3 0x0f 0x74 - invalid */
4230/* Opcode 0xf2 0x0f 0x74 - invalid */
4231
4232
4233/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4234FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4235{
4236 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4237 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4238}
4239
4240/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4241FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4242{
4243 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4244 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4245}
4246
4247/* Opcode 0xf3 0x0f 0x75 - invalid */
4248/* Opcode 0xf2 0x0f 0x75 - invalid */
4249
4250
4251/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4252FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4253{
4254 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4255 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4256}
4257
4258/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4259FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4260{
4261    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4262 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4263}
4264
4265/* Opcode 0xf3 0x0f 0x76 - invalid */
4266/* Opcode 0xf2 0x0f 0x76 - invalid */
4267
4268
4269/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4270FNIEMOP_DEF(iemOp_emms)
4271{
4272 IEMOP_MNEMONIC(emms, "emms");
4273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4274
4275    IEM_MC_BEGIN(0, 0);
4276 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4277 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4278 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
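    /* Leaving MMX mode tags all eight FPU registers as empty again, making
       the x87 stack usable for FPU code. */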
4279 IEM_MC_FPU_FROM_MMX_MODE();
4280 IEM_MC_ADVANCE_RIP();
4281 IEM_MC_END();
4282 return VINF_SUCCESS;
4283}
4284
4285/* Opcode 0x66 0x0f 0x77 - invalid */
4286/* Opcode 0xf3 0x0f 0x77 - invalid */
4287/* Opcode 0xf2 0x0f 0x77 - invalid */
4288
4289/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4290#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4291FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4292{
4293 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4294 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVInstrDiag_Vmread);
4295 IEMOP_HLP_VMX_INSTR("vmread", kVmxVInstrDiag_Vmread);
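    /* The operand-size prefix is ignored: VMREAD uses 64-bit operands in long
       mode and 32-bit operands everywhere else. */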
4296 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4297
4298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4299 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4300 {
4301 /*
4302 * Register, register.
4303 */
4304 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4305 if (enmEffOpSize == IEMMODE_64BIT)
4306 {
4307 IEM_MC_BEGIN(2, 0);
4308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4309 IEM_MC_ARG(uint64_t, u64Enc, 1);
4310 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4311 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4312 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread64_reg, pu64Dst, u64Enc);
4313 IEM_MC_END();
4314 }
4315 else
4316 {
4317 IEM_MC_BEGIN(2, 0);
4318 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4319 IEM_MC_ARG(uint32_t, u32Enc, 1);
4320 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4321 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4322 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread32_reg, pu32Dst, u32Enc);
4323 IEM_MC_END();
4324 }
4325 }
4326 else
4327 {
4328 /*
4329 * Register, memory.
4330 */
4331 if (enmEffOpSize == IEMMODE_64BIT)
4332 {
4333 IEM_MC_BEGIN(4, 0);
4334 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4335 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4336 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4337 IEM_MC_ARG(uint64_t, u64Enc, 3);
4338 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4340 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4341 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4342 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4343 IEM_MC_END();
4344 }
4345 else
4346 {
4347 IEM_MC_BEGIN(4, 0);
4348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4349 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4350 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4351 IEM_MC_ARG(uint32_t, u32Enc, 3);
4352 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4354 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4355 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4356 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4357 IEM_MC_END();
4358 }
4359 }
4360 return VINF_SUCCESS;
4361}
4362#else
4363FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4364#endif
4365
4366/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4367FNIEMOP_STUB(iemOp_AmdGrp17);
4368/* Opcode 0xf3 0x0f 0x78 - invalid */
4369/* Opcode 0xf2 0x0f 0x78 - invalid */
4370
4371/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4372#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4373FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4374{
4375 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4376 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVInstrDiag_Vmwrite);
4377 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVInstrDiag_Vmwrite);
4378 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4379
4380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4382 {
4383 /*
4384 * Register, register.
4385 */
4386 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4387 if (enmEffOpSize == IEMMODE_64BIT)
4388 {
4389 IEM_MC_BEGIN(2, 0);
4390 IEM_MC_ARG(uint64_t, u64Val, 0);
4391 IEM_MC_ARG(uint64_t, u64Enc, 1);
4392 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4393 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4394 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4395 IEM_MC_END();
4396 }
4397 else
4398 {
4399 IEM_MC_BEGIN(2, 0);
4400 IEM_MC_ARG(uint32_t, u32Val, 0);
4401 IEM_MC_ARG(uint32_t, u32Enc, 1);
4402 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4403 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4404 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4405 IEM_MC_END();
4406 }
4407 }
4408 else
4409 {
4410 /*
4411 * Register, memory.
4412 */
4413 if (enmEffOpSize == IEMMODE_64BIT)
4414 {
4415 IEM_MC_BEGIN(4, 0);
4416 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4417 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4418 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4419 IEM_MC_ARG(uint64_t, u64Enc, 3);
4420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4421 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4422 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4423 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4424 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4425 IEM_MC_END();
4426 }
4427 else
4428 {
4429 IEM_MC_BEGIN(4, 0);
4430 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4431 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4432 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4433 IEM_MC_ARG(uint32_t, u32Enc, 3);
4434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4435 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4436 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4437 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4438 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4439 IEM_MC_END();
4440 }
4441 }
4442 return VINF_SUCCESS;
4443}
4444#else
4445FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4446#endif
4447/* Opcode 0x66 0x0f 0x79 - invalid */
4448/* Opcode 0xf3 0x0f 0x79 - invalid */
4449/* Opcode 0xf2 0x0f 0x79 - invalid */
4450
4451/* Opcode 0x0f 0x7a - invalid */
4452/* Opcode 0x66 0x0f 0x7a - invalid */
4453/* Opcode 0xf3 0x0f 0x7a - invalid */
4454/* Opcode 0xf2 0x0f 0x7a - invalid */
4455
4456/* Opcode 0x0f 0x7b - invalid */
4457/* Opcode 0x66 0x0f 0x7b - invalid */
4458/* Opcode 0xf3 0x0f 0x7b - invalid */
4459/* Opcode 0xf2 0x0f 0x7b - invalid */
4460
4461/* Opcode 0x0f 0x7c - invalid */
4462/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4463FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4464/* Opcode 0xf3 0x0f 0x7c - invalid */
4465/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4466FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4467
4468/* Opcode 0x0f 0x7d - invalid */
4469/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4470FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4471/* Opcode 0xf3 0x0f 0x7d - invalid */
4472/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4473FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4474
4475
4476/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4477FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4478{
4479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4480 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4481 {
4482 /**
4483 * @opcode 0x7e
4484 * @opcodesub rex.w=1
4485 * @oppfx none
4486 * @opcpuid mmx
4487 * @opgroup og_mmx_datamove
4488 * @opxcpttype 5
4489 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4490 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4491 */
4492 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4494 {
4495 /* greg64, MMX */
4496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4497 IEM_MC_BEGIN(0, 1);
4498 IEM_MC_LOCAL(uint64_t, u64Tmp);
4499
4500 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4502
4503 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4504 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4505 IEM_MC_FPU_TO_MMX_MODE();
4506
4507 IEM_MC_ADVANCE_RIP();
4508 IEM_MC_END();
4509 }
4510 else
4511 {
4512 /* [mem64], MMX */
4513 IEM_MC_BEGIN(0, 2);
4514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4515 IEM_MC_LOCAL(uint64_t, u64Tmp);
4516
4517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4519 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4520 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4521
4522 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4523 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4524 IEM_MC_FPU_TO_MMX_MODE();
4525
4526 IEM_MC_ADVANCE_RIP();
4527 IEM_MC_END();
4528 }
4529 }
4530 else
4531 {
4532 /**
4533 * @opdone
4534 * @opcode 0x7e
4535 * @opcodesub rex.w=0
4536 * @oppfx none
4537 * @opcpuid mmx
4538 * @opgroup og_mmx_datamove
4539 * @opxcpttype 5
4540 * @opfunction iemOp_movd_q_Ey_Pd
4541 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4542 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4543 */
4544 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4546 {
4547 /* greg32, MMX */
4548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4549 IEM_MC_BEGIN(0, 1);
4550 IEM_MC_LOCAL(uint32_t, u32Tmp);
4551
4552 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4553 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4554
4555 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4556 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4557 IEM_MC_FPU_TO_MMX_MODE();
4558
4559 IEM_MC_ADVANCE_RIP();
4560 IEM_MC_END();
4561 }
4562 else
4563 {
4564 /* [mem32], MMX */
4565 IEM_MC_BEGIN(0, 2);
4566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4567 IEM_MC_LOCAL(uint32_t, u32Tmp);
4568
4569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4571 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4572 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4573
4574 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4575 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4576 IEM_MC_FPU_TO_MMX_MODE();
4577
4578 IEM_MC_ADVANCE_RIP();
4579 IEM_MC_END();
4580 }
4581 }
4582 return VINF_SUCCESS;
4583
4584}
4585
4586
4587FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4588{
4589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4590 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4591 {
4592 /**
4593 * @opcode 0x7e
4594 * @opcodesub rex.w=1
4595 * @oppfx 0x66
4596 * @opcpuid sse2
4597 * @opgroup og_sse2_simdint_datamove
4598 * @opxcpttype 5
4599 * @optest 64-bit / op1=1 op2=2 -> op1=2
4600 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4601 */
4602 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4604 {
4605 /* greg64, XMM */
4606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4607 IEM_MC_BEGIN(0, 1);
4608 IEM_MC_LOCAL(uint64_t, u64Tmp);
4609
4610 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4611 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4612
4613 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4614 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4615
4616 IEM_MC_ADVANCE_RIP();
4617 IEM_MC_END();
4618 }
4619 else
4620 {
4621 /* [mem64], XMM */
4622 IEM_MC_BEGIN(0, 2);
4623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4624 IEM_MC_LOCAL(uint64_t, u64Tmp);
4625
4626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4628 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4629 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4630
4631 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4632 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4633
4634 IEM_MC_ADVANCE_RIP();
4635 IEM_MC_END();
4636 }
4637 }
4638 else
4639 {
4640 /**
4641 * @opdone
4642 * @opcode 0x7e
4643 * @opcodesub rex.w=0
4644 * @oppfx 0x66
4645 * @opcpuid sse2
4646 * @opgroup og_sse2_simdint_datamove
4647 * @opxcpttype 5
4648 * @opfunction iemOp_movd_q_Ey_Vy
4649 * @optest op1=1 op2=2 -> op1=2
4650 * @optest op1=0 op2=-42 -> op1=-42
4651 */
4652 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4654 {
4655 /* greg32, XMM */
4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4657 IEM_MC_BEGIN(0, 1);
4658 IEM_MC_LOCAL(uint32_t, u32Tmp);
4659
4660 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4662
4663 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4664 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4665
4666 IEM_MC_ADVANCE_RIP();
4667 IEM_MC_END();
4668 }
4669 else
4670 {
4671 /* [mem32], XMM */
4672 IEM_MC_BEGIN(0, 2);
4673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4674 IEM_MC_LOCAL(uint32_t, u32Tmp);
4675
4676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4678 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4680
4681 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4682 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4683
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 }
4688 return VINF_SUCCESS;
4689
4690}
4691
4692/**
4693 * @opcode 0x7e
4694 * @oppfx 0xf3
4695 * @opcpuid sse2
4696 * @opgroup og_sse2_pcksclr_datamove
4697 * @opxcpttype none
4698 * @optest op1=1 op2=2 -> op1=2
4699 * @optest op1=0 op2=-42 -> op1=-42
4700 */
4701FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4702{
4703 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4706 {
4707 /*
4708 * Register, register.
4709 */
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4711 IEM_MC_BEGIN(0, 2);
4712 IEM_MC_LOCAL(uint64_t, uSrc);
4713
4714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4716
4717 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
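        /* movq zero-extends: the low quadword is copied and the high quadword
           of the destination XMM register is cleared. */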
4718 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4719
4720 IEM_MC_ADVANCE_RIP();
4721 IEM_MC_END();
4722 }
4723 else
4724 {
4725 /*
4726 * Memory, register.
4727 */
4728 IEM_MC_BEGIN(0, 2);
4729 IEM_MC_LOCAL(uint64_t, uSrc);
4730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4731
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4735 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4736
4737 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4738 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4739
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 return VINF_SUCCESS;
4744}
4745
4746/* Opcode 0xf2 0x0f 0x7e - invalid */
4747
4748
4749/** Opcode 0x0f 0x7f - movq Qq, Pq */
4750FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4751{
4752 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4755 {
4756 /*
4757 * Register, register.
4758 */
4759 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4760 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_BEGIN(0, 1);
4763 IEM_MC_LOCAL(uint64_t, u64Tmp);
4764 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4765 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4766 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4767 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4768 IEM_MC_ADVANCE_RIP();
4769 IEM_MC_END();
4770 }
4771 else
4772 {
4773 /*
4774 * Register, memory.
4775 */
4776 IEM_MC_BEGIN(0, 2);
4777 IEM_MC_LOCAL(uint64_t, u64Tmp);
4778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4779
4780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4783 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4784
4785 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4786 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4787
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 }
4791 return VINF_SUCCESS;
4792}
4793
4794/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4795FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4796{
4797 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4799 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4800 {
4801 /*
4802 * Register, register.
4803 */
4804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4805 IEM_MC_BEGIN(0, 0);
4806 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4807 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4808 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4809 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4810 IEM_MC_ADVANCE_RIP();
4811 IEM_MC_END();
4812 }
4813 else
4814 {
4815 /*
4816 * Register, memory.
4817 */
4818 IEM_MC_BEGIN(0, 2);
4819 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4821
4822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4824 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4825 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4826
4827 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4828 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4829
4830 IEM_MC_ADVANCE_RIP();
4831 IEM_MC_END();
4832 }
4833 return VINF_SUCCESS;
4834}
4835
4836/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4837FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4838{
4839    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4840    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4842 {
4843 /*
4844 * Register, register.
4845 */
4846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4847 IEM_MC_BEGIN(0, 0);
4848 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4849 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4850 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4851 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4852 IEM_MC_ADVANCE_RIP();
4853 IEM_MC_END();
4854 }
4855 else
4856 {
4857 /*
4858 * Register, memory.
4859 */
4860 IEM_MC_BEGIN(0, 2);
4861 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4863
4864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4867 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4868
4869 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4870 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4871
4872 IEM_MC_ADVANCE_RIP();
4873 IEM_MC_END();
4874 }
4875 return VINF_SUCCESS;
4876}
4877
4878/* Opcode 0xf2 0x0f 0x7f - invalid */
4879
4880
4881
4882/** Opcode 0x0f 0x80. */
4883FNIEMOP_DEF(iemOp_jo_Jv)
4884{
4885 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4886 IEMOP_HLP_MIN_386();
4887 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
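    /* Jv is a signed displacement relative to the next instruction; in 64-bit
       mode the operand size defaults to 64 bits, so the 32-bit immediate form
       below is decoded and sign-extended to the full RIP width. */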
4888 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4889 {
4890 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4892
4893 IEM_MC_BEGIN(0, 0);
4894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4895 IEM_MC_REL_JMP_S16(i16Imm);
4896 } IEM_MC_ELSE() {
4897 IEM_MC_ADVANCE_RIP();
4898 } IEM_MC_ENDIF();
4899 IEM_MC_END();
4900 }
4901 else
4902 {
4903 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4905
4906 IEM_MC_BEGIN(0, 0);
4907 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4908 IEM_MC_REL_JMP_S32(i32Imm);
4909 } IEM_MC_ELSE() {
4910 IEM_MC_ADVANCE_RIP();
4911 } IEM_MC_ENDIF();
4912 IEM_MC_END();
4913 }
4914 return VINF_SUCCESS;
4915}
4916
4917
4918/** Opcode 0x0f 0x81. */
4919FNIEMOP_DEF(iemOp_jno_Jv)
4920{
4921 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4922 IEMOP_HLP_MIN_386();
4923 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4924 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4925 {
4926 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4928
4929 IEM_MC_BEGIN(0, 0);
4930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4931 IEM_MC_ADVANCE_RIP();
4932 } IEM_MC_ELSE() {
4933 IEM_MC_REL_JMP_S16(i16Imm);
4934 } IEM_MC_ENDIF();
4935 IEM_MC_END();
4936 }
4937 else
4938 {
4939 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4941
4942 IEM_MC_BEGIN(0, 0);
4943 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4944 IEM_MC_ADVANCE_RIP();
4945 } IEM_MC_ELSE() {
4946 IEM_MC_REL_JMP_S32(i32Imm);
4947 } IEM_MC_ENDIF();
4948 IEM_MC_END();
4949 }
4950 return VINF_SUCCESS;
4951}
4952
4953
4954/** Opcode 0x0f 0x82. */
4955FNIEMOP_DEF(iemOp_jc_Jv)
4956{
4957 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4958 IEMOP_HLP_MIN_386();
4959 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4960 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4961 {
4962 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964
4965 IEM_MC_BEGIN(0, 0);
4966 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4967 IEM_MC_REL_JMP_S16(i16Imm);
4968 } IEM_MC_ELSE() {
4969 IEM_MC_ADVANCE_RIP();
4970 } IEM_MC_ENDIF();
4971 IEM_MC_END();
4972 }
4973 else
4974 {
4975 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4977
4978 IEM_MC_BEGIN(0, 0);
4979 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4980 IEM_MC_REL_JMP_S32(i32Imm);
4981 } IEM_MC_ELSE() {
4982 IEM_MC_ADVANCE_RIP();
4983 } IEM_MC_ENDIF();
4984 IEM_MC_END();
4985 }
4986 return VINF_SUCCESS;
4987}
4988
4989
4990/** Opcode 0x0f 0x83. */
4991FNIEMOP_DEF(iemOp_jnc_Jv)
4992{
4993 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4994 IEMOP_HLP_MIN_386();
4995 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4996 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4997 {
4998 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5000
5001 IEM_MC_BEGIN(0, 0);
5002 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5003 IEM_MC_ADVANCE_RIP();
5004 } IEM_MC_ELSE() {
5005 IEM_MC_REL_JMP_S16(i16Imm);
5006 } IEM_MC_ENDIF();
5007 IEM_MC_END();
5008 }
5009 else
5010 {
5011 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5013
5014 IEM_MC_BEGIN(0, 0);
5015 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5016 IEM_MC_ADVANCE_RIP();
5017 } IEM_MC_ELSE() {
5018 IEM_MC_REL_JMP_S32(i32Imm);
5019 } IEM_MC_ENDIF();
5020 IEM_MC_END();
5021 }
5022 return VINF_SUCCESS;
5023}
5024
5025
5026/** Opcode 0x0f 0x84. */
5027FNIEMOP_DEF(iemOp_je_Jv)
5028{
5029 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5030 IEMOP_HLP_MIN_386();
5031 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5032 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5033 {
5034 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5036
5037 IEM_MC_BEGIN(0, 0);
5038 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5039 IEM_MC_REL_JMP_S16(i16Imm);
5040 } IEM_MC_ELSE() {
5041 IEM_MC_ADVANCE_RIP();
5042 } IEM_MC_ENDIF();
5043 IEM_MC_END();
5044 }
5045 else
5046 {
5047 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5049
5050 IEM_MC_BEGIN(0, 0);
5051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5052 IEM_MC_REL_JMP_S32(i32Imm);
5053 } IEM_MC_ELSE() {
5054 IEM_MC_ADVANCE_RIP();
5055 } IEM_MC_ENDIF();
5056 IEM_MC_END();
5057 }
5058 return VINF_SUCCESS;
5059}
5060
5061
5062/** Opcode 0x0f 0x85. */
5063FNIEMOP_DEF(iemOp_jne_Jv)
5064{
5065 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5066 IEMOP_HLP_MIN_386();
5067 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5068 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5069 {
5070 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5072
5073 IEM_MC_BEGIN(0, 0);
5074 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5075 IEM_MC_ADVANCE_RIP();
5076 } IEM_MC_ELSE() {
5077 IEM_MC_REL_JMP_S16(i16Imm);
5078 } IEM_MC_ENDIF();
5079 IEM_MC_END();
5080 }
5081 else
5082 {
5083 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5085
5086 IEM_MC_BEGIN(0, 0);
5087 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5088 IEM_MC_ADVANCE_RIP();
5089 } IEM_MC_ELSE() {
5090 IEM_MC_REL_JMP_S32(i32Imm);
5091 } IEM_MC_ENDIF();
5092 IEM_MC_END();
5093 }
5094 return VINF_SUCCESS;
5095}
5096
5097
5098/** Opcode 0x0f 0x86. */
5099FNIEMOP_DEF(iemOp_jbe_Jv)
5100{
5101 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5102 IEMOP_HLP_MIN_386();
5103 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5104 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5105 {
5106 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5108
5109 IEM_MC_BEGIN(0, 0);
5110 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5111 IEM_MC_REL_JMP_S16(i16Imm);
5112 } IEM_MC_ELSE() {
5113 IEM_MC_ADVANCE_RIP();
5114 } IEM_MC_ENDIF();
5115 IEM_MC_END();
5116 }
5117 else
5118 {
5119 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5121
5122 IEM_MC_BEGIN(0, 0);
5123 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5124 IEM_MC_REL_JMP_S32(i32Imm);
5125 } IEM_MC_ELSE() {
5126 IEM_MC_ADVANCE_RIP();
5127 } IEM_MC_ENDIF();
5128 IEM_MC_END();
5129 }
5130 return VINF_SUCCESS;
5131}
5132
5133
5134/** Opcode 0x0f 0x87. */
5135FNIEMOP_DEF(iemOp_jnbe_Jv)
5136{
5137 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5138 IEMOP_HLP_MIN_386();
5139 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5140 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5141 {
5142 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5144
5145 IEM_MC_BEGIN(0, 0);
5146 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5147 IEM_MC_ADVANCE_RIP();
5148 } IEM_MC_ELSE() {
5149 IEM_MC_REL_JMP_S16(i16Imm);
5150 } IEM_MC_ENDIF();
5151 IEM_MC_END();
5152 }
5153 else
5154 {
5155 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5157
5158 IEM_MC_BEGIN(0, 0);
5159 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5160 IEM_MC_ADVANCE_RIP();
5161 } IEM_MC_ELSE() {
5162 IEM_MC_REL_JMP_S32(i32Imm);
5163 } IEM_MC_ENDIF();
5164 IEM_MC_END();
5165 }
5166 return VINF_SUCCESS;
5167}
5168
5169
5170/** Opcode 0x0f 0x88. */
5171FNIEMOP_DEF(iemOp_js_Jv)
5172{
5173 IEMOP_MNEMONIC(js_Jv, "js Jv");
5174 IEMOP_HLP_MIN_386();
5175 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5176 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5177 {
5178 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5180
5181 IEM_MC_BEGIN(0, 0);
5182 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5183 IEM_MC_REL_JMP_S16(i16Imm);
5184 } IEM_MC_ELSE() {
5185 IEM_MC_ADVANCE_RIP();
5186 } IEM_MC_ENDIF();
5187 IEM_MC_END();
5188 }
5189 else
5190 {
5191 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5193
5194 IEM_MC_BEGIN(0, 0);
5195 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5196 IEM_MC_REL_JMP_S32(i32Imm);
5197 } IEM_MC_ELSE() {
5198 IEM_MC_ADVANCE_RIP();
5199 } IEM_MC_ENDIF();
5200 IEM_MC_END();
5201 }
5202 return VINF_SUCCESS;
5203}
5204
5205
5206/** Opcode 0x0f 0x89. */
5207FNIEMOP_DEF(iemOp_jns_Jv)
5208{
5209 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5210 IEMOP_HLP_MIN_386();
5211 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5212 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5213 {
5214 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216
5217 IEM_MC_BEGIN(0, 0);
5218 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5219 IEM_MC_ADVANCE_RIP();
5220 } IEM_MC_ELSE() {
5221 IEM_MC_REL_JMP_S16(i16Imm);
5222 } IEM_MC_ENDIF();
5223 IEM_MC_END();
5224 }
5225 else
5226 {
5227 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5229
5230 IEM_MC_BEGIN(0, 0);
5231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5232 IEM_MC_ADVANCE_RIP();
5233 } IEM_MC_ELSE() {
5234 IEM_MC_REL_JMP_S32(i32Imm);
5235 } IEM_MC_ENDIF();
5236 IEM_MC_END();
5237 }
5238 return VINF_SUCCESS;
5239}
5240
5241
5242/** Opcode 0x0f 0x8a. */
5243FNIEMOP_DEF(iemOp_jp_Jv)
5244{
5245 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5246 IEMOP_HLP_MIN_386();
5247 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5248 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5249 {
5250 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252
5253 IEM_MC_BEGIN(0, 0);
5254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5255 IEM_MC_REL_JMP_S16(i16Imm);
5256 } IEM_MC_ELSE() {
5257 IEM_MC_ADVANCE_RIP();
5258 } IEM_MC_ENDIF();
5259 IEM_MC_END();
5260 }
5261 else
5262 {
5263 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5265
5266 IEM_MC_BEGIN(0, 0);
5267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5268 IEM_MC_REL_JMP_S32(i32Imm);
5269 } IEM_MC_ELSE() {
5270 IEM_MC_ADVANCE_RIP();
5271 } IEM_MC_ENDIF();
5272 IEM_MC_END();
5273 }
5274 return VINF_SUCCESS;
5275}
5276
5277
5278/** Opcode 0x0f 0x8b. */
5279FNIEMOP_DEF(iemOp_jnp_Jv)
5280{
5281 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5282 IEMOP_HLP_MIN_386();
5283 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5284 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5285 {
5286 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5288
5289 IEM_MC_BEGIN(0, 0);
5290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5291 IEM_MC_ADVANCE_RIP();
5292 } IEM_MC_ELSE() {
5293 IEM_MC_REL_JMP_S16(i16Imm);
5294 } IEM_MC_ENDIF();
5295 IEM_MC_END();
5296 }
5297 else
5298 {
5299 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5301
5302 IEM_MC_BEGIN(0, 0);
5303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5304 IEM_MC_ADVANCE_RIP();
5305 } IEM_MC_ELSE() {
5306 IEM_MC_REL_JMP_S32(i32Imm);
5307 } IEM_MC_ENDIF();
5308 IEM_MC_END();
5309 }
5310 return VINF_SUCCESS;
5311}
5312
5313
5314/** Opcode 0x0f 0x8c. */
5315FNIEMOP_DEF(iemOp_jl_Jv)
5316{
5317 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5318 IEMOP_HLP_MIN_386();
5319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5320 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5321 {
5322 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5324
5325 IEM_MC_BEGIN(0, 0);
5326 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5327 IEM_MC_REL_JMP_S16(i16Imm);
5328 } IEM_MC_ELSE() {
5329 IEM_MC_ADVANCE_RIP();
5330 } IEM_MC_ENDIF();
5331 IEM_MC_END();
5332 }
5333 else
5334 {
5335 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5337
5338 IEM_MC_BEGIN(0, 0);
5339 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5340 IEM_MC_REL_JMP_S32(i32Imm);
5341 } IEM_MC_ELSE() {
5342 IEM_MC_ADVANCE_RIP();
5343 } IEM_MC_ENDIF();
5344 IEM_MC_END();
5345 }
5346 return VINF_SUCCESS;
5347}
5348
5349
5350/** Opcode 0x0f 0x8d. */
5351FNIEMOP_DEF(iemOp_jnl_Jv)
5352{
5353 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5354 IEMOP_HLP_MIN_386();
5355 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5356 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5357 {
5358 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5360
5361 IEM_MC_BEGIN(0, 0);
5362 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5363 IEM_MC_ADVANCE_RIP();
5364 } IEM_MC_ELSE() {
5365 IEM_MC_REL_JMP_S16(i16Imm);
5366 } IEM_MC_ENDIF();
5367 IEM_MC_END();
5368 }
5369 else
5370 {
5371 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5373
5374 IEM_MC_BEGIN(0, 0);
5375 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5376 IEM_MC_ADVANCE_RIP();
5377 } IEM_MC_ELSE() {
5378 IEM_MC_REL_JMP_S32(i32Imm);
5379 } IEM_MC_ENDIF();
5380 IEM_MC_END();
5381 }
5382 return VINF_SUCCESS;
5383}
5384
5385
5386/** Opcode 0x0f 0x8e. */
5387FNIEMOP_DEF(iemOp_jle_Jv)
5388{
5389 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5390 IEMOP_HLP_MIN_386();
5391 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5392 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5393 {
5394 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5396
5397 IEM_MC_BEGIN(0, 0);
5398 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5399 IEM_MC_REL_JMP_S16(i16Imm);
5400 } IEM_MC_ELSE() {
5401 IEM_MC_ADVANCE_RIP();
5402 } IEM_MC_ENDIF();
5403 IEM_MC_END();
5404 }
5405 else
5406 {
5407 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5409
5410 IEM_MC_BEGIN(0, 0);
5411 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5412 IEM_MC_REL_JMP_S32(i32Imm);
5413 } IEM_MC_ELSE() {
5414 IEM_MC_ADVANCE_RIP();
5415 } IEM_MC_ENDIF();
5416 IEM_MC_END();
5417 }
5418 return VINF_SUCCESS;
5419}
5420
5421
5422/** Opcode 0x0f 0x8f. */
5423FNIEMOP_DEF(iemOp_jnle_Jv)
5424{
5425 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5426 IEMOP_HLP_MIN_386();
5427 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5428 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5429 {
5430 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432
5433 IEM_MC_BEGIN(0, 0);
5434 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5435 IEM_MC_ADVANCE_RIP();
5436 } IEM_MC_ELSE() {
5437 IEM_MC_REL_JMP_S16(i16Imm);
5438 } IEM_MC_ENDIF();
5439 IEM_MC_END();
5440 }
5441 else
5442 {
5443 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445
5446 IEM_MC_BEGIN(0, 0);
5447 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5448 IEM_MC_ADVANCE_RIP();
5449 } IEM_MC_ELSE() {
5450 IEM_MC_REL_JMP_S32(i32Imm);
5451 } IEM_MC_ENDIF();
5452 IEM_MC_END();
5453 }
5454 return VINF_SUCCESS;
5455}
5456
5457
5458/** Opcode 0x0f 0x90. */
5459FNIEMOP_DEF(iemOp_seto_Eb)
5460{
5461 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5462 IEMOP_HLP_MIN_386();
5463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5464
5465 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5466 * any way. AMD says it's "unused", whatever that means. We're
5467 * ignoring for now. */
5468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5469 {
5470 /* register target */
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5472 IEM_MC_BEGIN(0, 0);
5473 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5474 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5475 } IEM_MC_ELSE() {
5476 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5477 } IEM_MC_ENDIF();
5478 IEM_MC_ADVANCE_RIP();
5479 IEM_MC_END();
5480 }
5481 else
5482 {
5483 /* memory target */
5484 IEM_MC_BEGIN(0, 1);
5485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5490 } IEM_MC_ELSE() {
5491 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5492 } IEM_MC_ENDIF();
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 }
5496 return VINF_SUCCESS;
5497}
5498
5499
5500/** Opcode 0x0f 0x91. */
5501FNIEMOP_DEF(iemOp_setno_Eb)
5502{
5503 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5504 IEMOP_HLP_MIN_386();
5505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5506
5507 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5508 * any way. AMD says it's "unused", whatever that means. We're
5509 * ignoring it for now. */
5510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5511 {
5512 /* register target */
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 IEM_MC_BEGIN(0, 0);
5515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5516 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5517 } IEM_MC_ELSE() {
5518 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5519 } IEM_MC_ENDIF();
5520 IEM_MC_ADVANCE_RIP();
5521 IEM_MC_END();
5522 }
5523 else
5524 {
5525 /* memory target */
5526 IEM_MC_BEGIN(0, 1);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5531 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5532 } IEM_MC_ELSE() {
5533 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5534 } IEM_MC_ENDIF();
5535 IEM_MC_ADVANCE_RIP();
5536 IEM_MC_END();
5537 }
5538 return VINF_SUCCESS;
5539}
5540
5541
5542/** Opcode 0x0f 0x92. */
5543FNIEMOP_DEF(iemOp_setc_Eb)
5544{
5545 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5546 IEMOP_HLP_MIN_386();
5547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5548
5549 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5550 * any way. AMD says it's "unused", whatever that means. We're
5551 * ignoring it for now. */
5552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5553 {
5554 /* register target */
5555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5556 IEM_MC_BEGIN(0, 0);
5557 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5558 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5559 } IEM_MC_ELSE() {
5560 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5561 } IEM_MC_ENDIF();
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 }
5565 else
5566 {
5567 /* memory target */
5568 IEM_MC_BEGIN(0, 1);
5569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5574 } IEM_MC_ELSE() {
5575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5576 } IEM_MC_ENDIF();
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 }
5580 return VINF_SUCCESS;
5581}
5582
5583
5584/** Opcode 0x0f 0x93. */
5585FNIEMOP_DEF(iemOp_setnc_Eb)
5586{
5587 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5588 IEMOP_HLP_MIN_386();
5589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5590
5591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5592 * any way. AMD says it's "unused", whatever that means. We're
5593 * ignoring it for now. */
5594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5595 {
5596 /* register target */
5597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5598 IEM_MC_BEGIN(0, 0);
5599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5600 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5601 } IEM_MC_ELSE() {
5602 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5603 } IEM_MC_ENDIF();
5604 IEM_MC_ADVANCE_RIP();
5605 IEM_MC_END();
5606 }
5607 else
5608 {
5609 /* memory target */
5610 IEM_MC_BEGIN(0, 1);
5611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5616 } IEM_MC_ELSE() {
5617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5618 } IEM_MC_ENDIF();
5619 IEM_MC_ADVANCE_RIP();
5620 IEM_MC_END();
5621 }
5622 return VINF_SUCCESS;
5623}
5624
5625
5626/** Opcode 0x0f 0x94. */
5627FNIEMOP_DEF(iemOp_sete_Eb)
5628{
5629 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5630 IEMOP_HLP_MIN_386();
5631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5632
5633 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5634 * any way. AMD says it's "unused", whatever that means. We're
5635 * ignoring it for now. */
5636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5637 {
5638 /* register target */
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_BEGIN(0, 0);
5641 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5642 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5643 } IEM_MC_ELSE() {
5644 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5645 } IEM_MC_ENDIF();
5646 IEM_MC_ADVANCE_RIP();
5647 IEM_MC_END();
5648 }
5649 else
5650 {
5651 /* memory target */
5652 IEM_MC_BEGIN(0, 1);
5653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 }
5664 return VINF_SUCCESS;
5665}
5666
5667
5668/** Opcode 0x0f 0x95. */
5669FNIEMOP_DEF(iemOp_setne_Eb)
5670{
5671 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5672 IEMOP_HLP_MIN_386();
5673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5674
5675 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5676 * any way. AMD says it's "unused", whatever that means. We're
5677 * ignoring it for now. */
5678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5679 {
5680 /* register target */
5681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5682 IEM_MC_BEGIN(0, 0);
5683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5684 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5685 } IEM_MC_ELSE() {
5686 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5687 } IEM_MC_ENDIF();
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 }
5691 else
5692 {
5693 /* memory target */
5694 IEM_MC_BEGIN(0, 1);
5695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5700 } IEM_MC_ELSE() {
5701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5702 } IEM_MC_ENDIF();
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 }
5706 return VINF_SUCCESS;
5707}
5708
5709
5710/** Opcode 0x0f 0x96. */
5711FNIEMOP_DEF(iemOp_setbe_Eb)
5712{
5713 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5714 IEMOP_HLP_MIN_386();
5715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5716
5717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5718 * any way. AMD says it's "unused", whatever that means. We're
5719 * ignoring it for now. */
5720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5721 {
5722 /* register target */
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEM_MC_BEGIN(0, 0);
5725 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5726 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5727 } IEM_MC_ELSE() {
5728 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5729 } IEM_MC_ENDIF();
5730 IEM_MC_ADVANCE_RIP();
5731 IEM_MC_END();
5732 }
5733 else
5734 {
5735 /* memory target */
5736 IEM_MC_BEGIN(0, 1);
5737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5742 } IEM_MC_ELSE() {
5743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5744 } IEM_MC_ENDIF();
5745 IEM_MC_ADVANCE_RIP();
5746 IEM_MC_END();
5747 }
5748 return VINF_SUCCESS;
5749}
5750
5751
5752/** Opcode 0x0f 0x97. */
5753FNIEMOP_DEF(iemOp_setnbe_Eb)
5754{
5755 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5756 IEMOP_HLP_MIN_386();
5757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5758
5759 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5760 * any way. AMD says it's "unused", whatever that means. We're
5761 * ignoring it for now. */
5762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5763 {
5764 /* register target */
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 IEM_MC_BEGIN(0, 0);
5767 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5768 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5769 } IEM_MC_ELSE() {
5770 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5771 } IEM_MC_ENDIF();
5772 IEM_MC_ADVANCE_RIP();
5773 IEM_MC_END();
5774 }
5775 else
5776 {
5777 /* memory target */
5778 IEM_MC_BEGIN(0, 1);
5779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5782 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5784 } IEM_MC_ELSE() {
5785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5786 } IEM_MC_ENDIF();
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 }
5790 return VINF_SUCCESS;
5791}
5792
5793
5794/** Opcode 0x0f 0x98. */
5795FNIEMOP_DEF(iemOp_sets_Eb)
5796{
5797 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5798 IEMOP_HLP_MIN_386();
5799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5800
5801 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5802 * any way. AMD says it's "unused", whatever that means. We're
5803 * ignoring it for now. */
5804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5805 {
5806 /* register target */
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 IEM_MC_BEGIN(0, 0);
5809 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5810 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5811 } IEM_MC_ELSE() {
5812 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5813 } IEM_MC_ENDIF();
5814 IEM_MC_ADVANCE_RIP();
5815 IEM_MC_END();
5816 }
5817 else
5818 {
5819 /* memory target */
5820 IEM_MC_BEGIN(0, 1);
5821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5824 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5826 } IEM_MC_ELSE() {
5827 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5828 } IEM_MC_ENDIF();
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 }
5832 return VINF_SUCCESS;
5833}
5834
5835
5836/** Opcode 0x0f 0x99. */
5837FNIEMOP_DEF(iemOp_setns_Eb)
5838{
5839 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5840 IEMOP_HLP_MIN_386();
5841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5842
5843 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5844 * any way. AMD says it's "unused", whatever that means. We're
5845 * ignoring it for now. */
5846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5847 {
5848 /* register target */
5849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5850 IEM_MC_BEGIN(0, 0);
5851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5853 } IEM_MC_ELSE() {
5854 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5855 } IEM_MC_ENDIF();
5856 IEM_MC_ADVANCE_RIP();
5857 IEM_MC_END();
5858 }
5859 else
5860 {
5861 /* memory target */
5862 IEM_MC_BEGIN(0, 1);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5868 } IEM_MC_ELSE() {
5869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5870 } IEM_MC_ENDIF();
5871 IEM_MC_ADVANCE_RIP();
5872 IEM_MC_END();
5873 }
5874 return VINF_SUCCESS;
5875}
5876
5877
5878/** Opcode 0x0f 0x9a. */
5879FNIEMOP_DEF(iemOp_setp_Eb)
5880{
5881 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5882 IEMOP_HLP_MIN_386();
5883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5884
5885 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5886 * any way. AMD says it's "unused", whatever that means. We're
5887 * ignoring it for now. */
5888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5889 {
5890 /* register target */
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892 IEM_MC_BEGIN(0, 0);
5893 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5895 } IEM_MC_ELSE() {
5896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5897 } IEM_MC_ENDIF();
5898 IEM_MC_ADVANCE_RIP();
5899 IEM_MC_END();
5900 }
5901 else
5902 {
5903 /* memory target */
5904 IEM_MC_BEGIN(0, 1);
5905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5908 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5910 } IEM_MC_ELSE() {
5911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5912 } IEM_MC_ENDIF();
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 return VINF_SUCCESS;
5917}
5918
5919
5920/** Opcode 0x0f 0x9b. */
5921FNIEMOP_DEF(iemOp_setnp_Eb)
5922{
5923 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5924 IEMOP_HLP_MIN_386();
5925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5926
5927 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5928 * any way. AMD says it's "unused", whatever that means. We're
5929 * ignoring it for now. */
5930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5931 {
5932 /* register target */
5933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5934 IEM_MC_BEGIN(0, 0);
5935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5937 } IEM_MC_ELSE() {
5938 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5939 } IEM_MC_ENDIF();
5940 IEM_MC_ADVANCE_RIP();
5941 IEM_MC_END();
5942 }
5943 else
5944 {
5945 /* memory target */
5946 IEM_MC_BEGIN(0, 1);
5947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5952 } IEM_MC_ELSE() {
5953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5954 } IEM_MC_ENDIF();
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 }
5958 return VINF_SUCCESS;
5959}
5960
5961
5962/** Opcode 0x0f 0x9c. */
5963FNIEMOP_DEF(iemOp_setl_Eb)
5964{
5965 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5966 IEMOP_HLP_MIN_386();
5967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5968
5969 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5970 * any way. AMD says it's "unused", whatever that means. We're
5971 * ignoring it for now. */
5972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5973 {
5974 /* register target */
5975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5976 IEM_MC_BEGIN(0, 0);
5977 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5978 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5979 } IEM_MC_ELSE() {
5980 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5981 } IEM_MC_ENDIF();
5982 IEM_MC_ADVANCE_RIP();
5983 IEM_MC_END();
5984 }
5985 else
5986 {
5987 /* memory target */
5988 IEM_MC_BEGIN(0, 1);
5989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5992 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5994 } IEM_MC_ELSE() {
5995 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5996 } IEM_MC_ENDIF();
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 }
6000 return VINF_SUCCESS;
6001}
6002
6003
6004/** Opcode 0x0f 0x9d. */
6005FNIEMOP_DEF(iemOp_setnl_Eb)
6006{
6007 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6008 IEMOP_HLP_MIN_386();
6009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6010
6011 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6012 * any way. AMD says it's "unused", whatever that means. We're
6013 * ignoring it for now. */
6014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6015 {
6016 /* register target */
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_BEGIN(0, 0);
6019 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6020 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6021 } IEM_MC_ELSE() {
6022 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6023 } IEM_MC_ENDIF();
6024 IEM_MC_ADVANCE_RIP();
6025 IEM_MC_END();
6026 }
6027 else
6028 {
6029 /* memory target */
6030 IEM_MC_BEGIN(0, 1);
6031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6034 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6036 } IEM_MC_ELSE() {
6037 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6038 } IEM_MC_ENDIF();
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 }
6042 return VINF_SUCCESS;
6043}
6044
6045
6046/** Opcode 0x0f 0x9e. */
6047FNIEMOP_DEF(iemOp_setle_Eb)
6048{
6049 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6050 IEMOP_HLP_MIN_386();
6051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6052
6053 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6054 * any way. AMD says it's "unused", whatever that means. We're
6055 * ignoring it for now. */
6056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6057 {
6058 /* register target */
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6060 IEM_MC_BEGIN(0, 0);
6061 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6062 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6063 } IEM_MC_ELSE() {
6064 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6065 } IEM_MC_ENDIF();
6066 IEM_MC_ADVANCE_RIP();
6067 IEM_MC_END();
6068 }
6069 else
6070 {
6071 /* memory target */
6072 IEM_MC_BEGIN(0, 1);
6073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6076 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6077 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6078 } IEM_MC_ELSE() {
6079 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6080 } IEM_MC_ENDIF();
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 }
6084 return VINF_SUCCESS;
6085}
6086
6087
6088/** Opcode 0x0f 0x9f. */
6089FNIEMOP_DEF(iemOp_setnle_Eb)
6090{
6091 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6092 IEMOP_HLP_MIN_386();
6093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6094
6095 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6096 * any way. AMD says it's "unused", whatever that means. We're
6097 * ignoring it for now. */
6098 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6099 {
6100 /* register target */
6101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6102 IEM_MC_BEGIN(0, 0);
6103 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6104 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6105 } IEM_MC_ELSE() {
6106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6107 } IEM_MC_ENDIF();
6108 IEM_MC_ADVANCE_RIP();
6109 IEM_MC_END();
6110 }
6111 else
6112 {
6113 /* memory target */
6114 IEM_MC_BEGIN(0, 1);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6118 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6119 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6120 } IEM_MC_ELSE() {
6121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6122 } IEM_MC_ENDIF();
6123 IEM_MC_ADVANCE_RIP();
6124 IEM_MC_END();
6125 }
6126 return VINF_SUCCESS;
6127}
6128
6129
6130/**
6131 * Common 'push segment-register' helper.
6132 */
6133FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6134{
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6136 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS can be pushed in 64-bit mode. */
6137 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6138
6139 switch (pVCpu->iem.s.enmEffOpSize)
6140 {
6141 case IEMMODE_16BIT:
6142 IEM_MC_BEGIN(0, 1);
6143 IEM_MC_LOCAL(uint16_t, u16Value);
6144 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6145 IEM_MC_PUSH_U16(u16Value);
6146 IEM_MC_ADVANCE_RIP();
6147 IEM_MC_END();
6148 break;
6149
6150 case IEMMODE_32BIT:
6151 IEM_MC_BEGIN(0, 1);
6152 IEM_MC_LOCAL(uint32_t, u32Value);
6153 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
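 /* IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32: some CPUs write only
    the low 16 bits of the 32-bit stack slot when pushing a segment
    register, which is assumed to be what the dedicated microcode op
    models. */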
6154 IEM_MC_PUSH_U32_SREG(u32Value);
6155 IEM_MC_ADVANCE_RIP();
6156 IEM_MC_END();
6157 break;
6158
6159 case IEMMODE_64BIT:
6160 IEM_MC_BEGIN(0, 1);
6161 IEM_MC_LOCAL(uint64_t, u64Value);
6162 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6163 IEM_MC_PUSH_U64(u64Value);
6164 IEM_MC_ADVANCE_RIP();
6165 IEM_MC_END();
6166 break;
6167 }
6168
6169 return VINF_SUCCESS;
6170}
6171
6172
6173/** Opcode 0x0f 0xa0. */
6174FNIEMOP_DEF(iemOp_push_fs)
6175{
6176 IEMOP_MNEMONIC(push_fs, "push fs");
6177 IEMOP_HLP_MIN_386();
6178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6179 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6180}
6181
6182
6183/** Opcode 0x0f 0xa1. */
6184FNIEMOP_DEF(iemOp_pop_fs)
6185{
6186 IEMOP_MNEMONIC(pop_fs, "pop fs");
6187 IEMOP_HLP_MIN_386();
6188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6189 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6190}
6191
6192
6193/** Opcode 0x0f 0xa2. */
6194FNIEMOP_DEF(iemOp_cpuid)
6195{
6196 IEMOP_MNEMONIC(cpuid, "cpuid");
6197 IEMOP_HLP_MIN_486(); /* Not all 486s support CPUID. */
6198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6199 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6200}
6201
6202
6203/**
6204 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6205 * iemOp_bts_Ev_Gv.
6206 */
6207FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6208{
6209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6210 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6211
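 /* For register destinations the bit offset wraps modulo the operand
    width (hence the 0xf/0x1f/0x3f masks below); only the memory forms
    honour the full signed bit offset. */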
6212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6213 {
6214 /* register destination. */
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216 switch (pVCpu->iem.s.enmEffOpSize)
6217 {
6218 case IEMMODE_16BIT:
6219 IEM_MC_BEGIN(3, 0);
6220 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6221 IEM_MC_ARG(uint16_t, u16Src, 1);
6222 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6223
6224 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6225 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6226 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6227 IEM_MC_REF_EFLAGS(pEFlags);
6228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6229
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 return VINF_SUCCESS;
6233
6234 case IEMMODE_32BIT:
6235 IEM_MC_BEGIN(3, 0);
6236 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6237 IEM_MC_ARG(uint32_t, u32Src, 1);
6238 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6239
6240 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6241 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6242 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6243 IEM_MC_REF_EFLAGS(pEFlags);
6244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6245
6246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6247 IEM_MC_ADVANCE_RIP();
6248 IEM_MC_END();
6249 return VINF_SUCCESS;
6250
6251 case IEMMODE_64BIT:
6252 IEM_MC_BEGIN(3, 0);
6253 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6254 IEM_MC_ARG(uint64_t, u64Src, 1);
6255 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6256
6257 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6258 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6259 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6260 IEM_MC_REF_EFLAGS(pEFlags);
6261 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6262
6263 IEM_MC_ADVANCE_RIP();
6264 IEM_MC_END();
6265 return VINF_SUCCESS;
6266
6267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6268 }
6269 }
6270 else
6271 {
6272 /* memory destination. */
6273
6274 uint32_t fAccess;
6275 if (pImpl->pfnLockedU16)
6276 fAccess = IEM_ACCESS_DATA_RW;
6277 else /* BT */
6278 fAccess = IEM_ACCESS_DATA_R;
6279
6280 /** @todo test negative bit offsets! */
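 /* For memory destinations the bit offset taken from the register is a
    signed value spanning the full operand width.  The effective address
    is adjusted by (offset >> log2(width)) * (width / 8) bytes and the low
    log2(width) bits select the bit within the addressed unit.  E.g. for
    'bt word [mem], ax' with ax=0xffff (-1): i16AddrAdj = -1 >> 4 = -1,
    shifted left by 1 gives -2, so bit 15 of the word at mem-2 is tested. */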
6281 switch (pVCpu->iem.s.enmEffOpSize)
6282 {
6283 case IEMMODE_16BIT:
6284 IEM_MC_BEGIN(3, 2);
6285 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6286 IEM_MC_ARG(uint16_t, u16Src, 1);
6287 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6289 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6290
6291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6292 if (pImpl->pfnLockedU16)
6293 IEMOP_HLP_DONE_DECODING();
6294 else
6295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6296 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6297 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6298 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6299 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6300 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6301 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6302 IEM_MC_FETCH_EFLAGS(EFlags);
6303
6304 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6305 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6306 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6307 else
6308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6310
6311 IEM_MC_COMMIT_EFLAGS(EFlags);
6312 IEM_MC_ADVANCE_RIP();
6313 IEM_MC_END();
6314 return VINF_SUCCESS;
6315
6316 case IEMMODE_32BIT:
6317 IEM_MC_BEGIN(3, 2);
6318 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6319 IEM_MC_ARG(uint32_t, u32Src, 1);
6320 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6322 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6323
6324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6325 if (pImpl->pfnLockedU16)
6326 IEMOP_HLP_DONE_DECODING();
6327 else
6328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6329 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6330 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6331 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6332 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6333 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6334 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6335 IEM_MC_FETCH_EFLAGS(EFlags);
6336
6337 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6338 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6339 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6340 else
6341 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6342 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6343
6344 IEM_MC_COMMIT_EFLAGS(EFlags);
6345 IEM_MC_ADVANCE_RIP();
6346 IEM_MC_END();
6347 return VINF_SUCCESS;
6348
6349 case IEMMODE_64BIT:
6350 IEM_MC_BEGIN(3, 2);
6351 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6352 IEM_MC_ARG(uint64_t, u64Src, 1);
6353 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6355 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6356
6357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6358 if (pImpl->pfnLockedU16)
6359 IEMOP_HLP_DONE_DECODING();
6360 else
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6363 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6364 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6365 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6366 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6367 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6368 IEM_MC_FETCH_EFLAGS(EFlags);
6369
6370 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6371 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6372 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6373 else
6374 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6375 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6376
6377 IEM_MC_COMMIT_EFLAGS(EFlags);
6378 IEM_MC_ADVANCE_RIP();
6379 IEM_MC_END();
6380 return VINF_SUCCESS;
6381
6382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6383 }
6384 }
6385}
6386
6387
6388/** Opcode 0x0f 0xa3. */
6389FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6390{
6391 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6392 IEMOP_HLP_MIN_386();
6393 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6394}
6395
6396
6397/**
6398 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6399 */
6400FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6401{
6402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6403 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6404
6405 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6406 {
6407 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6409
6410 switch (pVCpu->iem.s.enmEffOpSize)
6411 {
6412 case IEMMODE_16BIT:
6413 IEM_MC_BEGIN(4, 0);
6414 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6415 IEM_MC_ARG(uint16_t, u16Src, 1);
6416 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6417 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6418
6419 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6420 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6421 IEM_MC_REF_EFLAGS(pEFlags);
6422 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6423
6424 IEM_MC_ADVANCE_RIP();
6425 IEM_MC_END();
6426 return VINF_SUCCESS;
6427
6428 case IEMMODE_32BIT:
6429 IEM_MC_BEGIN(4, 0);
6430 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6431 IEM_MC_ARG(uint32_t, u32Src, 1);
6432 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6433 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6434
6435 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6436 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6437 IEM_MC_REF_EFLAGS(pEFlags);
6438 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6439
6440 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6441 IEM_MC_ADVANCE_RIP();
6442 IEM_MC_END();
6443 return VINF_SUCCESS;
6444
6445 case IEMMODE_64BIT:
6446 IEM_MC_BEGIN(4, 0);
6447 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6448 IEM_MC_ARG(uint64_t, u64Src, 1);
6449 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6450 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6451
6452 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6453 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6454 IEM_MC_REF_EFLAGS(pEFlags);
6455 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6456
6457 IEM_MC_ADVANCE_RIP();
6458 IEM_MC_END();
6459 return VINF_SUCCESS;
6460
6461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6462 }
6463 }
6464 else
6465 {
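 /* Note: the cbImm value of 1 passed to IEM_MC_CALC_RM_EFF_ADDR below
    accounts for the trailing Ib, so that RIP-relative addressing in
    64-bit mode is computed relative to the end of the complete
    instruction. */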
6466 switch (pVCpu->iem.s.enmEffOpSize)
6467 {
6468 case IEMMODE_16BIT:
6469 IEM_MC_BEGIN(4, 2);
6470 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6471 IEM_MC_ARG(uint16_t, u16Src, 1);
6472 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6473 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6475
6476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6477 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6478 IEM_MC_ASSIGN(cShiftArg, cShift);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6481 IEM_MC_FETCH_EFLAGS(EFlags);
6482 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6483 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6484
6485 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6486 IEM_MC_COMMIT_EFLAGS(EFlags);
6487 IEM_MC_ADVANCE_RIP();
6488 IEM_MC_END();
6489 return VINF_SUCCESS;
6490
6491 case IEMMODE_32BIT:
6492 IEM_MC_BEGIN(4, 2);
6493 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6494 IEM_MC_ARG(uint32_t, u32Src, 1);
6495 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6496 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6498
6499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6500 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6501 IEM_MC_ASSIGN(cShiftArg, cShift);
6502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6503 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6504 IEM_MC_FETCH_EFLAGS(EFlags);
6505 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6506 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6507
6508 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6509 IEM_MC_COMMIT_EFLAGS(EFlags);
6510 IEM_MC_ADVANCE_RIP();
6511 IEM_MC_END();
6512 return VINF_SUCCESS;
6513
6514 case IEMMODE_64BIT:
6515 IEM_MC_BEGIN(4, 2);
6516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6517 IEM_MC_ARG(uint64_t, u64Src, 1);
6518 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6519 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6521
6522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6523 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6524 IEM_MC_ASSIGN(cShiftArg, cShift);
6525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6526 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6527 IEM_MC_FETCH_EFLAGS(EFlags);
6528 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6529 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6530
6531 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6532 IEM_MC_COMMIT_EFLAGS(EFlags);
6533 IEM_MC_ADVANCE_RIP();
6534 IEM_MC_END();
6535 return VINF_SUCCESS;
6536
6537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6538 }
6539 }
6540}
6541
6542
6543/**
6544 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6545 */
6546FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6547{
6548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6549 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6550
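 /* Same as the Ib variant above, except the shift count is taken from CL. */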
6551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6552 {
6553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6554
6555 switch (pVCpu->iem.s.enmEffOpSize)
6556 {
6557 case IEMMODE_16BIT:
6558 IEM_MC_BEGIN(4, 0);
6559 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6560 IEM_MC_ARG(uint16_t, u16Src, 1);
6561 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6562 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6563
6564 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6565 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6566 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6567 IEM_MC_REF_EFLAGS(pEFlags);
6568 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6569
6570 IEM_MC_ADVANCE_RIP();
6571 IEM_MC_END();
6572 return VINF_SUCCESS;
6573
6574 case IEMMODE_32BIT:
6575 IEM_MC_BEGIN(4, 0);
6576 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6577 IEM_MC_ARG(uint32_t, u32Src, 1);
6578 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6579 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6580
6581 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6582 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6583 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6584 IEM_MC_REF_EFLAGS(pEFlags);
6585 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6586
6587 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 case IEMMODE_64BIT:
6593 IEM_MC_BEGIN(4, 0);
6594 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6595 IEM_MC_ARG(uint64_t, u64Src, 1);
6596 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6597 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6598
6599 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6601 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6602 IEM_MC_REF_EFLAGS(pEFlags);
6603 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6604
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6610 }
6611 }
6612 else
6613 {
6614 switch (pVCpu->iem.s.enmEffOpSize)
6615 {
6616 case IEMMODE_16BIT:
6617 IEM_MC_BEGIN(4, 2);
6618 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6619 IEM_MC_ARG(uint16_t, u16Src, 1);
6620 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6621 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6623
6624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6626 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6627 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6628 IEM_MC_FETCH_EFLAGS(EFlags);
6629 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6630 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6631
6632 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6633 IEM_MC_COMMIT_EFLAGS(EFlags);
6634 IEM_MC_ADVANCE_RIP();
6635 IEM_MC_END();
6636 return VINF_SUCCESS;
6637
6638 case IEMMODE_32BIT:
6639 IEM_MC_BEGIN(4, 2);
6640 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6641 IEM_MC_ARG(uint32_t, u32Src, 1);
6642 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6643 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6645
6646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6648 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6649 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6650 IEM_MC_FETCH_EFLAGS(EFlags);
6651 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6652 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6653
6654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6655 IEM_MC_COMMIT_EFLAGS(EFlags);
6656 IEM_MC_ADVANCE_RIP();
6657 IEM_MC_END();
6658 return VINF_SUCCESS;
6659
6660 case IEMMODE_64BIT:
6661 IEM_MC_BEGIN(4, 2);
6662 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6663 IEM_MC_ARG(uint64_t, u64Src, 1);
6664 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6667
6668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6670 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6671 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6672 IEM_MC_FETCH_EFLAGS(EFlags);
6673 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6674 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6675
6676 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6677 IEM_MC_COMMIT_EFLAGS(EFlags);
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6683 }
6684 }
6685}
6686
6687
6688
6689/** Opcode 0x0f 0xa4. */
6690FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6691{
6692 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6693 IEMOP_HLP_MIN_386();
6694 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6695}
6696
6697
6698/** Opcode 0x0f 0xa5. */
6699FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6700{
6701 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6702 IEMOP_HLP_MIN_386();
6703 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6704}
6705
6706
6707/** Opcode 0x0f 0xa8. */
6708FNIEMOP_DEF(iemOp_push_gs)
6709{
6710 IEMOP_MNEMONIC(push_gs, "push gs");
6711 IEMOP_HLP_MIN_386();
6712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6713 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6714}
6715
6716
6717/** Opcode 0x0f 0xa9. */
6718FNIEMOP_DEF(iemOp_pop_gs)
6719{
6720 IEMOP_MNEMONIC(pop_gs, "pop gs");
6721 IEMOP_HLP_MIN_386();
6722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6723 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6724}
6725
6726
6727/** Opcode 0x0f 0xaa. */
6728FNIEMOP_DEF(iemOp_rsm)
6729{
6730 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6731 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6733 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6734}
6735
6736
6737
6738/** Opcode 0x0f 0xab. */
6739FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6740{
6741 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6742 IEMOP_HLP_MIN_386();
6743 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6744}
6745
6746
6747/** Opcode 0x0f 0xac. */
6748FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6749{
6750 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6751 IEMOP_HLP_MIN_386();
6752 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6753}
6754
6755
6756/** Opcode 0x0f 0xad. */
6757FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6758{
6759 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6760 IEMOP_HLP_MIN_386();
6761 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6762}
6763
6764
6765/** Opcode 0x0f 0xae mem/0. */
6766FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6767{
6768 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6769 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6770 return IEMOP_RAISE_INVALID_OPCODE();
6771
6772 IEM_MC_BEGIN(3, 1);
6773 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6774 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6775 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6779 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6780 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6781 IEM_MC_END();
6782 return VINF_SUCCESS;
6783}
6784
6785
6786/** Opcode 0x0f 0xae mem/1. */
6787FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6788{
6789 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6790 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6791 return IEMOP_RAISE_INVALID_OPCODE();
6792
6793 IEM_MC_BEGIN(3, 1);
6794 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6795 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6796 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6799 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6800 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6801 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6802 IEM_MC_END();
6803 return VINF_SUCCESS;
6804}
6805
6806
6807/**
6808 * @opmaps grp15
6809 * @opcode !11/2
6810 * @oppfx none
6811 * @opcpuid sse
6812 * @opgroup og_sse_mxcsrsm
6813 * @opxcpttype 5
6814 * @optest op1=0 -> mxcsr=0
6815 * @optest op1=0x2083 -> mxcsr=0x2083
6816 * @optest op1=0xfffffffe -> value.xcpt=0xd
6817 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6818 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6819 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6820 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6821 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6822 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6823 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6824 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6825 */
6826FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6827{
6828 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6829 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6830 return IEMOP_RAISE_INVALID_OPCODE();
6831
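 /* Note: iemCImpl_ldmxcsr is expected to raise #GP(0) when reserved MXCSR
    bits are set, per the op1=0xfffffffe test above. */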
6832 IEM_MC_BEGIN(2, 0);
6833 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6834 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6837 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR, so actualize for change (cf. stmxcsr below). */
6838 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6839 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6840 IEM_MC_END();
6841 return VINF_SUCCESS;
6842}
6843
6844
6845/**
6846 * @opmaps grp15
6847 * @opcode !11/3
6848 * @oppfx none
6849 * @opcpuid sse
6850 * @opgroup og_sse_mxcsrsm
6851 * @opxcpttype 5
6852 * @optest mxcsr=0 -> op1=0
6853 * @optest mxcsr=0x2083 -> op1=0x2083
6854 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6855 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6856 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6857 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6858 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6859 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6860 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6861 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6862 */
6863FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6864{
6865 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6866 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6867 return IEMOP_RAISE_INVALID_OPCODE();
6868
6869 IEM_MC_BEGIN(2, 0);
6870 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6871 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6874 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6875 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6876 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6877 IEM_MC_END();
6878 return VINF_SUCCESS;
6879}
6880
6881
6882/**
6883 * @opmaps grp15
6884 * @opcode !11/4
6885 * @oppfx none
6886 * @opcpuid xsave
6887 * @opgroup og_system
6888 * @opxcpttype none
6889 */
6890FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6891{
6892 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6894 return IEMOP_RAISE_INVALID_OPCODE();
6895
6896 IEM_MC_BEGIN(3, 0);
6897 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6898 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6899 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6902 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6903 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6904 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6905 IEM_MC_END();
6906 return VINF_SUCCESS;
6907}
6908
6909
6910/**
6911 * @opmaps grp15
6912 * @opcode !11/5
6913 * @oppfx none
6914 * @opcpuid xsave
6915 * @opgroup og_system
6916 * @opxcpttype none
6917 */
6918FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6919{
6920 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6922 return IEMOP_RAISE_INVALID_OPCODE();
6923
6924 IEM_MC_BEGIN(3, 0);
6925 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6926 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6927 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6930 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads state, matching fxrstor above. */
6931 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6932 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6933 IEM_MC_END();
6934 return VINF_SUCCESS;
6935}
6936
6937/** Opcode 0x0f 0xae mem/6. */
6938FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6939
6940/**
6941 * @opmaps grp15
6942 * @opcode !11/7
6943 * @oppfx none
6944 * @opcpuid clfsh
6945 * @opgroup og_cachectl
6946 * @optest op1=1 ->
6947 */
6948FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6949{
6950 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6951 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6952 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6953
6954 IEM_MC_BEGIN(2, 0);
6955 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6956 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6959 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6960 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6961 IEM_MC_END();
6962 return VINF_SUCCESS;
6963}
6964
6965/**
6966 * @opmaps grp15
6967 * @opcode !11/7
6968 * @oppfx 0x66
6969 * @opcpuid clflushopt
6970 * @opgroup og_cachectl
6971 * @optest op1=1 ->
6972 */
6973FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6974{
6975 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6976 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6977 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6978
6979 IEM_MC_BEGIN(2, 0);
6980 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6981 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6984 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6985 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6986 IEM_MC_END();
6987 return VINF_SUCCESS;
6988}
6989
6990
6991/** Opcode 0x0f 0xae 11b/5. */
6992FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6993{
6994 RT_NOREF_PV(bRm);
6995 IEMOP_MNEMONIC(lfence, "lfence");
6996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6997 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6998 return IEMOP_RAISE_INVALID_OPCODE();
6999
7000 IEM_MC_BEGIN(0, 0);
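 /* Use the real LFENCE when the host has SSE2; otherwise fall back on
    iemAImpl_alt_mem_fence, presumably a generic fence that is at least
    as strong (e.g. a locked operation). */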
7001 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7002 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7003 else
7004 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7005 IEM_MC_ADVANCE_RIP();
7006 IEM_MC_END();
7007 return VINF_SUCCESS;
7008}
7009
7010
7011/** Opcode 0x0f 0xae 11b/6. */
7012FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7013{
7014 RT_NOREF_PV(bRm);
7015 IEMOP_MNEMONIC(mfence, "mfence");
7016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7017 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7018 return IEMOP_RAISE_INVALID_OPCODE();
7019
7020 IEM_MC_BEGIN(0, 0);
7021 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7022 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7023 else
7024 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 return VINF_SUCCESS;
7028}
7029
7030
7031/** Opcode 0x0f 0xae 11b/7. */
7032FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7033{
7034 RT_NOREF_PV(bRm);
7035 IEMOP_MNEMONIC(sfence, "sfence");
7036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7037 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7038 return IEMOP_RAISE_INVALID_OPCODE();
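 /* Note: architecturally SFENCE was introduced with SSE, not SSE2, so the
    fSse2 check above is slightly conservative. */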
7039
7040 IEM_MC_BEGIN(0, 0);
7041 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7042 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7043 else
7044 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7045 IEM_MC_ADVANCE_RIP();
7046 IEM_MC_END();
7047 return VINF_SUCCESS;
7048}
7049
7050
7051/** Opcode 0xf3 0x0f 0xae 11b/0. */
7052FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7053{
7054 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
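 /* IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is expected to raise #UD unless we
    are in 64-bit mode with CR4.FSGSBASE set. */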
7056 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7057 {
7058 IEM_MC_BEGIN(1, 0);
7059 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7060 IEM_MC_ARG(uint64_t, u64Dst, 0);
7061 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7062 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7063 IEM_MC_ADVANCE_RIP();
7064 IEM_MC_END();
7065 }
7066 else
7067 {
7068 IEM_MC_BEGIN(1, 0);
7069 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7070 IEM_MC_ARG(uint32_t, u32Dst, 0);
7071 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7072 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7073 IEM_MC_ADVANCE_RIP();
7074 IEM_MC_END();
7075 }
7076 return VINF_SUCCESS;
7077}
7078
7079
7080/** Opcode 0xf3 0x0f 0xae 11b/1. */
7081FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7082{
7083 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7085 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7086 {
7087 IEM_MC_BEGIN(1, 0);
7088 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7089 IEM_MC_ARG(uint64_t, u64Dst, 0);
7090 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7091 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7092 IEM_MC_ADVANCE_RIP();
7093 IEM_MC_END();
7094 }
7095 else
7096 {
7097 IEM_MC_BEGIN(1, 0);
7098 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7099 IEM_MC_ARG(uint32_t, u32Dst, 0);
7100 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7101 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7102 IEM_MC_ADVANCE_RIP();
7103 IEM_MC_END();
7104 }
7105 return VINF_SUCCESS;
7106}
7107
7108
7109/** Opcode 0xf3 0x0f 0xae 11b/2. */
7110FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7111{
7112 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
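 /* The 64-bit form must load a canonical address or #GP(0) is raised
    below; the 32-bit form zero-extends into the full segment base. */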
7114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7115 {
7116 IEM_MC_BEGIN(1, 0);
7117 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7118 IEM_MC_ARG(uint64_t, u64Dst, 0);
7119 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7120 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7121 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 }
7125 else
7126 {
7127 IEM_MC_BEGIN(1, 0);
7128 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7129 IEM_MC_ARG(uint32_t, u32Dst, 0);
7130 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7131 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7132 IEM_MC_ADVANCE_RIP();
7133 IEM_MC_END();
7134 }
7135 return VINF_SUCCESS;
7136}
7137
7138
7139/** Opcode 0xf3 0x0f 0xae 11b/3. */
7140FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7141{
7142 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7144 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7145 {
7146 IEM_MC_BEGIN(1, 0);
7147 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7148 IEM_MC_ARG(uint64_t, u64Dst, 0);
7149 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7150 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7151 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7152 IEM_MC_ADVANCE_RIP();
7153 IEM_MC_END();
7154 }
7155 else
7156 {
7157 IEM_MC_BEGIN(1, 0);
7158 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7159 IEM_MC_ARG(uint32_t, u32Dst, 0);
7160 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7161 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7162 IEM_MC_ADVANCE_RIP();
7163 IEM_MC_END();
7164 }
7165 return VINF_SUCCESS;
7166}
7167
7168
7169/**
7170 * Group 15 jump table for register variant.
7171 */
7172IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7173{ /* pfx: none, 066h, 0f3h, 0f2h */
7174 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7175 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7176 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7177 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7178 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7179 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7180 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7181 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7182};
7183AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7184
7185
7186/**
7187 * Group 15 jump table for memory variant.
7188 */
7189IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7190{ /* pfx: none, 066h, 0f3h, 0f2h */
7191 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7192 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7193 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7194 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7195 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7196 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7197 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7198 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7199};
7200AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7201
7202
7203/** Opcode 0x0f 0xae. */
7204FNIEMOP_DEF(iemOp_Grp15)
7205{
7206 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
7207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7209 /* register, register */
7210 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7211 + pVCpu->iem.s.idxPrefix], bRm);
7212 /* memory, register */
7213 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7214 + pVCpu->iem.s.idxPrefix], bRm);
7215}
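/* Worked example of the indexing above: the modr/m reg field selects the
   row and the last-prefix index the column, so lfence (0f ae /5, no prefix)
   dispatches via g_apfnGroup15RegReg[5*4 + 0] and the register form of
   rdfsbase (f3 0f ae /0) via g_apfnGroup15RegReg[0*4 + 2]; the memory
   table g_apfnGroup15MemReg is indexed the same way. */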
7216
7217
7218/** Opcode 0x0f 0xaf. */
7219FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7220{
7221 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7222 IEMOP_HLP_MIN_386();
7223 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7224 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7225}
7226
7227
7228/** Opcode 0x0f 0xb0. */
7229FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7230{
7231 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7232 IEMOP_HLP_MIN_486();
7233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7234
7235 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7236 {
7237 IEMOP_HLP_DONE_DECODING();
7238 IEM_MC_BEGIN(4, 0);
7239 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7240 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7241 IEM_MC_ARG(uint8_t, u8Src, 2);
7242 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7243
7244 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7245 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7246 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7247 IEM_MC_REF_EFLAGS(pEFlags);
7248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7250 else
7251 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7252
7253 IEM_MC_ADVANCE_RIP();
7254 IEM_MC_END();
7255 }
7256 else
7257 {
7258 IEM_MC_BEGIN(4, 3);
7259 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7260 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7261 IEM_MC_ARG(uint8_t, u8Src, 2);
7262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7264 IEM_MC_LOCAL(uint8_t, u8Al);
7265
7266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7267 IEMOP_HLP_DONE_DECODING();
7268 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7269 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7270 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7271 IEM_MC_FETCH_EFLAGS(EFlags);
7272 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7275 else
7276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7277
7278 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7279 IEM_MC_COMMIT_EFLAGS(EFlags);
7280 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7281 IEM_MC_ADVANCE_RIP();
7282 IEM_MC_END();
7283 }
7284 return VINF_SUCCESS;
7285}
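/* For reference, a rough C sketch of the CMPXCHG r/m8,r8 semantics that
   iemAImpl_cmpxchg_u8 implements. It is simplified to ZF only (the real
   worker sets all the arithmetic flags as the implicit CMP would), and the
   function name is illustrative, not an actual IEM symbol. */
#if 0
static void iemExampleCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;             /* Accumulator matched: store the source operand. */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu8Al     = *pu8Dst;           /* Mismatch: the destination is loaded into AL. */
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif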
7286
7287/** Opcode 0x0f 0xb1. */
7288FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7289{
7290 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7291 IEMOP_HLP_MIN_486();
7292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7293
7294 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7295 {
7296 IEMOP_HLP_DONE_DECODING();
7297 switch (pVCpu->iem.s.enmEffOpSize)
7298 {
7299 case IEMMODE_16BIT:
7300 IEM_MC_BEGIN(4, 0);
7301 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7302 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7303 IEM_MC_ARG(uint16_t, u16Src, 2);
7304 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7305
7306 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7307 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7308 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7309 IEM_MC_REF_EFLAGS(pEFlags);
7310 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7312 else
7313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7314
7315 IEM_MC_ADVANCE_RIP();
7316 IEM_MC_END();
7317 return VINF_SUCCESS;
7318
7319 case IEMMODE_32BIT:
7320 IEM_MC_BEGIN(4, 0);
7321 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7322 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7323 IEM_MC_ARG(uint32_t, u32Src, 2);
7324 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7325
7326 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7327 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7328 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7329 IEM_MC_REF_EFLAGS(pEFlags);
7330 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7331 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7332 else
7333 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7334
7335 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7337 IEM_MC_ADVANCE_RIP();
7338 IEM_MC_END();
7339 return VINF_SUCCESS;
7340
7341 case IEMMODE_64BIT:
7342 IEM_MC_BEGIN(4, 0);
7343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7344 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7345#ifdef RT_ARCH_X86
7346 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7347#else
7348 IEM_MC_ARG(uint64_t, u64Src, 2);
7349#endif
7350 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7351
7352 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7353 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7354 IEM_MC_REF_EFLAGS(pEFlags);
7355#ifdef RT_ARCH_X86
7356 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7357 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7358 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7359 else
7360 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7361#else
7362 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7365 else
7366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7367#endif
7368
7369 IEM_MC_ADVANCE_RIP();
7370 IEM_MC_END();
7371 return VINF_SUCCESS;
7372
7373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7374 }
7375 }
7376 else
7377 {
7378 switch (pVCpu->iem.s.enmEffOpSize)
7379 {
7380 case IEMMODE_16BIT:
7381 IEM_MC_BEGIN(4, 3);
7382 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7383 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7384 IEM_MC_ARG(uint16_t, u16Src, 2);
7385 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7387 IEM_MC_LOCAL(uint16_t, u16Ax);
7388
7389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7390 IEMOP_HLP_DONE_DECODING();
7391 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7392 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7393 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7394 IEM_MC_FETCH_EFLAGS(EFlags);
7395 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7396 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7397 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7398 else
7399 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7400
7401 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7402 IEM_MC_COMMIT_EFLAGS(EFlags);
7403 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7404 IEM_MC_ADVANCE_RIP();
7405 IEM_MC_END();
7406 return VINF_SUCCESS;
7407
7408 case IEMMODE_32BIT:
7409 IEM_MC_BEGIN(4, 3);
7410 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7411 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7412 IEM_MC_ARG(uint32_t, u32Src, 2);
7413 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7415 IEM_MC_LOCAL(uint32_t, u32Eax);
7416
7417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7418 IEMOP_HLP_DONE_DECODING();
7419 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7420 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7421 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7422 IEM_MC_FETCH_EFLAGS(EFlags);
7423 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7424 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7425 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7426 else
7427 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7428
7429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7430 IEM_MC_COMMIT_EFLAGS(EFlags);
7431 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7432 IEM_MC_ADVANCE_RIP();
7433 IEM_MC_END();
7434 return VINF_SUCCESS;
7435
7436 case IEMMODE_64BIT:
7437 IEM_MC_BEGIN(4, 3);
7438 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7439 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7440#ifdef RT_ARCH_X86
7441 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7442#else
7443 IEM_MC_ARG(uint64_t, u64Src, 2);
7444#endif
7445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7447 IEM_MC_LOCAL(uint64_t, u64Rax);
7448
7449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7450 IEMOP_HLP_DONE_DECODING();
7451 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7452 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7453 IEM_MC_FETCH_EFLAGS(EFlags);
7454 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7455#ifdef RT_ARCH_X86
7456 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7457 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7458 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7459 else
7460 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7461#else
7462 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7463 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7464 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7465 else
7466 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7467#endif
7468
7469 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7470 IEM_MC_COMMIT_EFLAGS(EFlags);
7471 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7472 IEM_MC_ADVANCE_RIP();
7473 IEM_MC_END();
7474 return VINF_SUCCESS;
7475
7476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7477 }
7478 }
7479}
7480
7481
7482FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7483{
7484 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7485 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7486
7487 switch (pVCpu->iem.s.enmEffOpSize)
7488 {
7489 case IEMMODE_16BIT:
7490 IEM_MC_BEGIN(5, 1);
7491 IEM_MC_ARG(uint16_t, uSel, 0);
7492 IEM_MC_ARG(uint16_t, offSeg, 1);
7493 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7494 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7495 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7496 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7499 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7500 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7501 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7502 IEM_MC_END();
7503 return VINF_SUCCESS;
7504
7505 case IEMMODE_32BIT:
7506 IEM_MC_BEGIN(5, 1);
7507 IEM_MC_ARG(uint16_t, uSel, 0);
7508 IEM_MC_ARG(uint32_t, offSeg, 1);
7509 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7510 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7511 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7512 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7515 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7516 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7517 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7518 IEM_MC_END();
7519 return VINF_SUCCESS;
7520
7521 case IEMMODE_64BIT:
7522 IEM_MC_BEGIN(5, 1);
7523 IEM_MC_ARG(uint16_t, uSel, 0);
7524 IEM_MC_ARG(uint64_t, offSeg, 1);
7525 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7526 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7527 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7528 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7531 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7532 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7533 else
7534 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7535 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7536 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7537 IEM_MC_END();
7538 return VINF_SUCCESS;
7539
7540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7541 }
7542}
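/* Memory layout assumed by the fetches above: the far pointer is stored
   offset first, selector last. E.g. for 'lss esp, [mem]' with a 32-bit
   operand size, the dword at [mem] becomes ESP and the word at [mem+4]
   becomes SS - which is why IEM_MC_FETCH_MEM_U16_DISP uses displacements
   of 2, 4 and 8 for the three operand sizes. */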
7543
7544
7545/** Opcode 0x0f 0xb2. */
7546FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7547{
7548 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7549 IEMOP_HLP_MIN_386();
7550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7552 return IEMOP_RAISE_INVALID_OPCODE();
7553 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7554}
7555
7556
7557/** Opcode 0x0f 0xb3. */
7558FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7559{
7560 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7561 IEMOP_HLP_MIN_386();
7562 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7563}
7564
7565
7566/** Opcode 0x0f 0xb4. */
7567FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7568{
7569 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7570 IEMOP_HLP_MIN_386();
7571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7573 return IEMOP_RAISE_INVALID_OPCODE();
7574 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7575}
7576
7577
7578/** Opcode 0x0f 0xb5. */
7579FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7580{
7581 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7582 IEMOP_HLP_MIN_386();
7583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7585 return IEMOP_RAISE_INVALID_OPCODE();
7586 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7587}
7588
7589
7590/** Opcode 0x0f 0xb6. */
7591FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7592{
7593 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7594 IEMOP_HLP_MIN_386();
7595
7596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7597
7598 /*
7599 * If rm is denoting a register, no more instruction bytes.
7600 */
7601 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7602 {
7603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7604 switch (pVCpu->iem.s.enmEffOpSize)
7605 {
7606 case IEMMODE_16BIT:
7607 IEM_MC_BEGIN(0, 1);
7608 IEM_MC_LOCAL(uint16_t, u16Value);
7609 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7610 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7611 IEM_MC_ADVANCE_RIP();
7612 IEM_MC_END();
7613 return VINF_SUCCESS;
7614
7615 case IEMMODE_32BIT:
7616 IEM_MC_BEGIN(0, 1);
7617 IEM_MC_LOCAL(uint32_t, u32Value);
7618 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7619 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7620 IEM_MC_ADVANCE_RIP();
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623
7624 case IEMMODE_64BIT:
7625 IEM_MC_BEGIN(0, 1);
7626 IEM_MC_LOCAL(uint64_t, u64Value);
7627 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7628 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7629 IEM_MC_ADVANCE_RIP();
7630 IEM_MC_END();
7631 return VINF_SUCCESS;
7632
7633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7634 }
7635 }
7636 else
7637 {
7638 /*
7639 * We're loading a register from memory.
7640 */
7641 switch (pVCpu->iem.s.enmEffOpSize)
7642 {
7643 case IEMMODE_16BIT:
7644 IEM_MC_BEGIN(0, 2);
7645 IEM_MC_LOCAL(uint16_t, u16Value);
7646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7649 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7650 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7651 IEM_MC_ADVANCE_RIP();
7652 IEM_MC_END();
7653 return VINF_SUCCESS;
7654
7655 case IEMMODE_32BIT:
7656 IEM_MC_BEGIN(0, 2);
7657 IEM_MC_LOCAL(uint32_t, u32Value);
7658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7661 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7662 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7663 IEM_MC_ADVANCE_RIP();
7664 IEM_MC_END();
7665 return VINF_SUCCESS;
7666
7667 case IEMMODE_64BIT:
7668 IEM_MC_BEGIN(0, 2);
7669 IEM_MC_LOCAL(uint64_t, u64Value);
7670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7673 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7674 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7675 IEM_MC_ADVANCE_RIP();
7676 IEM_MC_END();
7677 return VINF_SUCCESS;
7678
7679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7680 }
7681 }
7682}
7683
7684
7685/** Opcode 0x0f 0xb7. */
7686FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7687{
7688 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7689 IEMOP_HLP_MIN_386();
7690
7691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7692
7693 /** @todo Not entirely sure how the operand size prefix is handled here,
7694 * assuming that it will be ignored. Would be nice to have a few
7695 * tests for this. */
7696 /*
7697 * If rm is denoting a register, no more instruction bytes.
7698 */
7699 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7700 {
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7703 {
7704 IEM_MC_BEGIN(0, 1);
7705 IEM_MC_LOCAL(uint32_t, u32Value);
7706 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7707 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7708 IEM_MC_ADVANCE_RIP();
7709 IEM_MC_END();
7710 }
7711 else
7712 {
7713 IEM_MC_BEGIN(0, 1);
7714 IEM_MC_LOCAL(uint64_t, u64Value);
7715 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7716 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7717 IEM_MC_ADVANCE_RIP();
7718 IEM_MC_END();
7719 }
7720 }
7721 else
7722 {
7723 /*
7724 * We're loading a register from memory.
7725 */
7726 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7727 {
7728 IEM_MC_BEGIN(0, 2);
7729 IEM_MC_LOCAL(uint32_t, u32Value);
7730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7733 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7734 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7735 IEM_MC_ADVANCE_RIP();
7736 IEM_MC_END();
7737 }
7738 else
7739 {
7740 IEM_MC_BEGIN(0, 2);
7741 IEM_MC_LOCAL(uint64_t, u64Value);
7742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7745 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7746 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7747 IEM_MC_ADVANCE_RIP();
7748 IEM_MC_END();
7749 }
7750 }
7751 return VINF_SUCCESS;
7752}
7753
7754
7755/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7756FNIEMOP_UD_STUB(iemOp_jmpe);
7757/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7758FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7759
7760
7761/**
7762 * @opcode 0xb9
7763 * @opinvalid intel-modrm
7764 * @optest ->
7765 */
7766FNIEMOP_DEF(iemOp_Grp10)
7767{
7768 /*
7769 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
7770 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7771 */
7772 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7773 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7774 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7775}
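/* Decoding example (cf. bs3-cpu-decoder-1.c32): given the bytes
   '0f b9 84 24 44 33 22 11', Intel consumes the modr/m byte and presumably
   its SIB and disp32 bytes before raising #UD, whereas AMD raises #UD
   right after 0f b9. Forwarding to iemOp_InvalidNeedRM gives the Intel
   behaviour. */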
7776
7777
7778/** Opcode 0x0f 0xba. */
7779FNIEMOP_DEF(iemOp_Grp8)
7780{
7781 IEMOP_HLP_MIN_386();
7782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7783 PCIEMOPBINSIZES pImpl;
7784 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7785 {
7786 case 0: case 1: case 2: case 3:
7787 /* Both AMD and Intel want full modr/m decoding and imm8. */
7788 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7789 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7790 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7791 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7792 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7794 }
7795 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7796
7797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7798 {
7799 /* register destination. */
7800 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7802
7803 switch (pVCpu->iem.s.enmEffOpSize)
7804 {
7805 case IEMMODE_16BIT:
7806 IEM_MC_BEGIN(3, 0);
7807 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7808 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7810
7811 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7812 IEM_MC_REF_EFLAGS(pEFlags);
7813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7814
7815 IEM_MC_ADVANCE_RIP();
7816 IEM_MC_END();
7817 return VINF_SUCCESS;
7818
7819 case IEMMODE_32BIT:
7820 IEM_MC_BEGIN(3, 0);
7821 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7822 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7823 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7824
7825 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7826 IEM_MC_REF_EFLAGS(pEFlags);
7827 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7828
7829 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 return VINF_SUCCESS;
7833
7834 case IEMMODE_64BIT:
7835 IEM_MC_BEGIN(3, 0);
7836 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7837 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7838 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7839
7840 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7841 IEM_MC_REF_EFLAGS(pEFlags);
7842 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7843
7844 IEM_MC_ADVANCE_RIP();
7845 IEM_MC_END();
7846 return VINF_SUCCESS;
7847
7848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7849 }
7850 }
7851 else
7852 {
7853 /* memory destination. */
7854
7855 uint32_t fAccess;
7856 if (pImpl->pfnLockedU16)
7857 fAccess = IEM_ACCESS_DATA_RW;
7858 else /* BT */
7859 fAccess = IEM_ACCESS_DATA_R;
7860
7861 /** @todo test negative bit offsets! */
7862 switch (pVCpu->iem.s.enmEffOpSize)
7863 {
7864 case IEMMODE_16BIT:
7865 IEM_MC_BEGIN(3, 1);
7866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7867 IEM_MC_ARG(uint16_t, u16Src, 1);
7868 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7870
7871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7872 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7873 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7874 if (pImpl->pfnLockedU16)
7875 IEMOP_HLP_DONE_DECODING();
7876 else
7877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7878 IEM_MC_FETCH_EFLAGS(EFlags);
7879 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7880 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7881 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7882 else
7883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7884 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7885
7886 IEM_MC_COMMIT_EFLAGS(EFlags);
7887 IEM_MC_ADVANCE_RIP();
7888 IEM_MC_END();
7889 return VINF_SUCCESS;
7890
7891 case IEMMODE_32BIT:
7892 IEM_MC_BEGIN(3, 1);
7893 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7894 IEM_MC_ARG(uint32_t, u32Src, 1);
7895 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7897
7898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7899 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7900 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7901 if (pImpl->pfnLockedU16)
7902 IEMOP_HLP_DONE_DECODING();
7903 else
7904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7905 IEM_MC_FETCH_EFLAGS(EFlags);
7906 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7907 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7908 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7909 else
7910 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7911 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7912
7913 IEM_MC_COMMIT_EFLAGS(EFlags);
7914 IEM_MC_ADVANCE_RIP();
7915 IEM_MC_END();
7916 return VINF_SUCCESS;
7917
7918 case IEMMODE_64BIT:
7919 IEM_MC_BEGIN(3, 1);
7920 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7921 IEM_MC_ARG(uint64_t, u64Src, 1);
7922 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7924
7925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7926 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7927 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7928 if (pImpl->pfnLockedU16)
7929 IEMOP_HLP_DONE_DECODING();
7930 else
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 IEM_MC_FETCH_EFLAGS(EFlags);
7933 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7934 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7935 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7936 else
7937 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7939
7940 IEM_MC_COMMIT_EFLAGS(EFlags);
7941 IEM_MC_ADVANCE_RIP();
7942 IEM_MC_END();
7943 return VINF_SUCCESS;
7944
7945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7946 }
7947 }
7948}
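/* Worked example of the immediate masking in the code above: 'bt word
   [mem], 21' tests bit 21 & 0x0f = bit 5 of the 16-bit destination, so
   unlike the Ev,Gv bit forms the imm8 forms can never address memory
   outside the operand itself. */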
7949
7950
7951/** Opcode 0x0f 0xbb. */
7952FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7953{
7954 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7955 IEMOP_HLP_MIN_386();
7956 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7957}
7958
7959
7960/** Opcode 0x0f 0xbc. */
7961FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7962{
7963 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7964 IEMOP_HLP_MIN_386();
7965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7966 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7967}
7968
7969
7970/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7971FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7972
7973
7974/** Opcode 0x0f 0xbd. */
7975FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7976{
7977 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7978 IEMOP_HLP_MIN_386();
7979 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7980 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7981}
7982
7983
7984/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7985FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7986
7987
7988/** Opcode 0x0f 0xbe. */
7989FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7990{
7991 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7992 IEMOP_HLP_MIN_386();
7993
7994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7995
7996 /*
7997 * If rm is denoting a register, no more instruction bytes.
7998 */
7999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8000 {
8001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8002 switch (pVCpu->iem.s.enmEffOpSize)
8003 {
8004 case IEMMODE_16BIT:
8005 IEM_MC_BEGIN(0, 1);
8006 IEM_MC_LOCAL(uint16_t, u16Value);
8007 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8008 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8009 IEM_MC_ADVANCE_RIP();
8010 IEM_MC_END();
8011 return VINF_SUCCESS;
8012
8013 case IEMMODE_32BIT:
8014 IEM_MC_BEGIN(0, 1);
8015 IEM_MC_LOCAL(uint32_t, u32Value);
8016 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8017 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8018 IEM_MC_ADVANCE_RIP();
8019 IEM_MC_END();
8020 return VINF_SUCCESS;
8021
8022 case IEMMODE_64BIT:
8023 IEM_MC_BEGIN(0, 1);
8024 IEM_MC_LOCAL(uint64_t, u64Value);
8025 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8026 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8027 IEM_MC_ADVANCE_RIP();
8028 IEM_MC_END();
8029 return VINF_SUCCESS;
8030
8031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8032 }
8033 }
8034 else
8035 {
8036 /*
8037 * We're loading a register from memory.
8038 */
8039 switch (pVCpu->iem.s.enmEffOpSize)
8040 {
8041 case IEMMODE_16BIT:
8042 IEM_MC_BEGIN(0, 2);
8043 IEM_MC_LOCAL(uint16_t, u16Value);
8044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8047 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8048 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8049 IEM_MC_ADVANCE_RIP();
8050 IEM_MC_END();
8051 return VINF_SUCCESS;
8052
8053 case IEMMODE_32BIT:
8054 IEM_MC_BEGIN(0, 2);
8055 IEM_MC_LOCAL(uint32_t, u32Value);
8056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8060 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8061 IEM_MC_ADVANCE_RIP();
8062 IEM_MC_END();
8063 return VINF_SUCCESS;
8064
8065 case IEMMODE_64BIT:
8066 IEM_MC_BEGIN(0, 2);
8067 IEM_MC_LOCAL(uint64_t, u64Value);
8068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8071 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8072 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8073 IEM_MC_ADVANCE_RIP();
8074 IEM_MC_END();
8075 return VINF_SUCCESS;
8076
8077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8078 }
8079 }
8080}
8081
8082
8083/** Opcode 0x0f 0xbf. */
8084FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8085{
8086 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8087 IEMOP_HLP_MIN_386();
8088
8089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8090
8091 /** @todo Not entirely sure how the operand size prefix is handled here,
8092 * assuming that it will be ignored. Would be nice to have a few
8093 * tests for this. */
8094 /*
8095 * If rm is denoting a register, no more instruction bytes.
8096 */
8097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8098 {
8099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8100 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8101 {
8102 IEM_MC_BEGIN(0, 1);
8103 IEM_MC_LOCAL(uint32_t, u32Value);
8104 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8105 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8106 IEM_MC_ADVANCE_RIP();
8107 IEM_MC_END();
8108 }
8109 else
8110 {
8111 IEM_MC_BEGIN(0, 1);
8112 IEM_MC_LOCAL(uint64_t, u64Value);
8113 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8114 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8115 IEM_MC_ADVANCE_RIP();
8116 IEM_MC_END();
8117 }
8118 }
8119 else
8120 {
8121 /*
8122 * We're loading a register from memory.
8123 */
8124 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8125 {
8126 IEM_MC_BEGIN(0, 2);
8127 IEM_MC_LOCAL(uint32_t, u32Value);
8128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8131 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8132 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8133 IEM_MC_ADVANCE_RIP();
8134 IEM_MC_END();
8135 }
8136 else
8137 {
8138 IEM_MC_BEGIN(0, 2);
8139 IEM_MC_LOCAL(uint64_t, u64Value);
8140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8143 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8144 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8145 IEM_MC_ADVANCE_RIP();
8146 IEM_MC_END();
8147 }
8148 }
8149 return VINF_SUCCESS;
8150}
8151
8152
8153/** Opcode 0x0f 0xc0. */
8154FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8155{
8156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8157 IEMOP_HLP_MIN_486();
8158 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8159
8160 /*
8161 * If rm is denoting a register, no more instruction bytes.
8162 */
8163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8164 {
8165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8166
8167 IEM_MC_BEGIN(3, 0);
8168 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8169 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8170 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8171
8172 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8173 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8174 IEM_MC_REF_EFLAGS(pEFlags);
8175 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8176
8177 IEM_MC_ADVANCE_RIP();
8178 IEM_MC_END();
8179 }
8180 else
8181 {
8182 /*
8183 * We're accessing memory.
8184 */
8185 IEM_MC_BEGIN(3, 3);
8186 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8187 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8188 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8189 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8191
8192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8193 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8194 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8195 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8196 IEM_MC_FETCH_EFLAGS(EFlags);
8197 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8198 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8199 else
8200 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8201
8202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8203 IEM_MC_COMMIT_EFLAGS(EFlags);
8204 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8205 IEM_MC_ADVANCE_RIP();
8206 IEM_MC_END();
8207 return VINF_SUCCESS;
8208 }
8209 return VINF_SUCCESS;
8210}
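/* A rough C sketch of the XADD semantics iemAImpl_xadd_u8 implements; the
   EFLAGS update (as for ADD) is omitted and the name is illustrative only. */
#if 0
static void iemExampleXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;   /* Remember the old destination. */
    *pu8Dst = u8OldDst + *pu8Reg;       /* dst += src, flags as for ADD. */
    *pu8Reg = u8OldDst;                 /* The source register receives the old destination. */
}
#endif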
8211
8212
8213/** Opcode 0x0f 0xc1. */
8214FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8215{
8216 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8217 IEMOP_HLP_MIN_486();
8218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8219
8220 /*
8221 * If rm is denoting a register, no more instruction bytes.
8222 */
8223 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8224 {
8225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8226
8227 switch (pVCpu->iem.s.enmEffOpSize)
8228 {
8229 case IEMMODE_16BIT:
8230 IEM_MC_BEGIN(3, 0);
8231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8232 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8234
8235 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8236 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8237 IEM_MC_REF_EFLAGS(pEFlags);
8238 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8239
8240 IEM_MC_ADVANCE_RIP();
8241 IEM_MC_END();
8242 return VINF_SUCCESS;
8243
8244 case IEMMODE_32BIT:
8245 IEM_MC_BEGIN(3, 0);
8246 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8247 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8249
8250 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8251 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8252 IEM_MC_REF_EFLAGS(pEFlags);
8253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8254
8255 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8256 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8257 IEM_MC_ADVANCE_RIP();
8258 IEM_MC_END();
8259 return VINF_SUCCESS;
8260
8261 case IEMMODE_64BIT:
8262 IEM_MC_BEGIN(3, 0);
8263 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8264 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8266
8267 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8268 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8269 IEM_MC_REF_EFLAGS(pEFlags);
8270 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8271
8272 IEM_MC_ADVANCE_RIP();
8273 IEM_MC_END();
8274 return VINF_SUCCESS;
8275
8276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8277 }
8278 }
8279 else
8280 {
8281 /*
8282 * We're accessing memory.
8283 */
8284 switch (pVCpu->iem.s.enmEffOpSize)
8285 {
8286 case IEMMODE_16BIT:
8287 IEM_MC_BEGIN(3, 3);
8288 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8289 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8290 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8291 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8293
8294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8295 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8296 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8297 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8298 IEM_MC_FETCH_EFLAGS(EFlags);
8299 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8300 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8301 else
8302 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8303
8304 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8305 IEM_MC_COMMIT_EFLAGS(EFlags);
8306 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8307 IEM_MC_ADVANCE_RIP();
8308 IEM_MC_END();
8309 return VINF_SUCCESS;
8310
8311 case IEMMODE_32BIT:
8312 IEM_MC_BEGIN(3, 3);
8313 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8314 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8315 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8316 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8318
8319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8320 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8321 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8322 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8323 IEM_MC_FETCH_EFLAGS(EFlags);
8324 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8326 else
8327 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8328
8329 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8330 IEM_MC_COMMIT_EFLAGS(EFlags);
8331 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8332 IEM_MC_ADVANCE_RIP();
8333 IEM_MC_END();
8334 return VINF_SUCCESS;
8335
8336 case IEMMODE_64BIT:
8337 IEM_MC_BEGIN(3, 3);
8338 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8339 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8341 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8343
8344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8345 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8346 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8347 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8348 IEM_MC_FETCH_EFLAGS(EFlags);
8349 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8350 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8351 else
8352 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8353
8354 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8355 IEM_MC_COMMIT_EFLAGS(EFlags);
8356 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8357 IEM_MC_ADVANCE_RIP();
8358 IEM_MC_END();
8359 return VINF_SUCCESS;
8360
8361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8362 }
8363 }
8364}
8365
8366
8367/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8368FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8369/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8370FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8371/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8372FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8373/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8374FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8375
8376
8377/** Opcode 0x0f 0xc3. */
8378FNIEMOP_DEF(iemOp_movnti_My_Gy)
8379{
8380 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8381
8382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8383
8384 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8385 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8386 {
8387 switch (pVCpu->iem.s.enmEffOpSize)
8388 {
8389 case IEMMODE_32BIT:
8390 IEM_MC_BEGIN(0, 2);
8391 IEM_MC_LOCAL(uint32_t, u32Value);
8392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8393
8394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8397 return IEMOP_RAISE_INVALID_OPCODE();
8398
8399 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8400 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8401 IEM_MC_ADVANCE_RIP();
8402 IEM_MC_END();
8403 break;
8404
8405 case IEMMODE_64BIT:
8406 IEM_MC_BEGIN(0, 2);
8407 IEM_MC_LOCAL(uint64_t, u64Value);
8408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8409
8410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8413 return IEMOP_RAISE_INVALID_OPCODE();
8414
8415 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8416 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8417 IEM_MC_ADVANCE_RIP();
8418 IEM_MC_END();
8419 break;
8420
8421 case IEMMODE_16BIT:
8422 /** @todo check this form. */
8423 return IEMOP_RAISE_INVALID_OPCODE();
8424 }
8425 }
8426 else
8427 return IEMOP_RAISE_INVALID_OPCODE();
8428 return VINF_SUCCESS;
8429}
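/* Usage note: 'movnti [rdi], eax' is a non-temporal store hint meant to
   bypass the caches. Emulating it as the plain store above is
   architecturally fine since the hint has no functional semantics; only
   the SSE2 feature check and the memory-only destination are mandatory. */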
8430/* Opcode 0x66 0x0f 0xc3 - invalid */
8431/* Opcode 0xf3 0x0f 0xc3 - invalid */
8432/* Opcode 0xf2 0x0f 0xc3 - invalid */
8433
8434/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8435FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8436/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8437FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8438/* Opcode 0xf3 0x0f 0xc4 - invalid */
8439/* Opcode 0xf2 0x0f 0xc4 - invalid */
8440
8441/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8442FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8443/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8444FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8445/* Opcode 0xf3 0x0f 0xc5 - invalid */
8446/* Opcode 0xf2 0x0f 0xc5 - invalid */
8447
8448/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8449FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8450/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8451FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8452/* Opcode 0xf3 0x0f 0xc6 - invalid */
8453/* Opcode 0xf2 0x0f 0xc6 - invalid */
8454
8455
8456/** Opcode 0x0f 0xc7 !11/1. */
8457FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8458{
8459 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8460
8461 IEM_MC_BEGIN(4, 3);
8462 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8463 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8464 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8465 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8466 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8467 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8469
8470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8471 IEMOP_HLP_DONE_DECODING();
8472 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8473
8474 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8475 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8476 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8477
8478 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8479 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8480 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8481
8482 IEM_MC_FETCH_EFLAGS(EFlags);
8483 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8484 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8485 else
8486 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8487
8488 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8489 IEM_MC_COMMIT_EFLAGS(EFlags);
8490 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8491 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8492 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8493 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8494 IEM_MC_ENDIF();
8495 IEM_MC_ADVANCE_RIP();
8496
8497 IEM_MC_END();
8498 return VINF_SUCCESS;
8499}
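/* A rough C sketch of what iemAImpl_cmpxchg8b does with the register pairs
   mapped above; simplified to ZF only and using an illustrative name. */
#if 0
static void iemExampleCmpXchg8b(PRTUINT64U pu64MemDst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx, uint32_t *pfEFlags)
{
    if (pu64MemDst->u == pu64EaxEdx->u)
    {
        pu64MemDst->u  = pu64EbxEcx->u; /* Match: m64 = ECX:EBX, ZF=1. */
        *pfEFlags     |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u  = pu64MemDst->u; /* No match: EDX:EAX = m64, ZF=0. */
        *pfEFlags     &= ~X86_EFL_ZF;
    }
}
#endif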
8500
8501
8502/** Opcode REX.W 0x0f 0xc7 !11/1. */
8503FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8504{
8505 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8506 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8507 {
8508#if 0
8509 RT_NOREF(bRm);
8510 IEMOP_BITCH_ABOUT_STUB();
8511 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8512#else
8513 IEM_MC_BEGIN(4, 3);
8514 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8515 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8516 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8517 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8518 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8519 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8521
8522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8523 IEMOP_HLP_DONE_DECODING();
8524 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8525 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8526
8527 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8528 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8529 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8530
8531 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8532 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8533 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8534
8535 IEM_MC_FETCH_EFLAGS(EFlags);
8536# ifdef RT_ARCH_AMD64
8537 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8538 {
8539 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8540 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8541 else
8542 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8543 }
8544 else
8545# endif
8546 {
8547 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8548 accesses that are not at all atomic, which works fine in a UNI CPU guest
8549 configuration (ignoring DMA). If guest SMP is active we have no choice
8550 but to use a rendezvous callback here. Sigh. */
8551 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8552 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8553 else
8554 {
8555 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8556 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8557 }
8558 }
8559
8560 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8561 IEM_MC_COMMIT_EFLAGS(EFlags);
8562 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8563 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8564 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8565 IEM_MC_ENDIF();
8566 IEM_MC_ADVANCE_RIP();
8567
8568 IEM_MC_END();
8569 return VINF_SUCCESS;
8570#endif
8571 }
8572 Log(("cmpxchg16b -> #UD\n"));
8573 return IEMOP_RAISE_INVALID_OPCODE();
8574}
8575
8576FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8577{
8578 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8579 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8580 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8581}
8582
8583/** Opcode 0x0f 0xc7 11/6. */
8584FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8585
8586/** Opcode 0x0f 0xc7 !11/6. */
8587#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8588FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8589{
8590 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8591 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVInstrDiag_Vmptrld);
8592 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVInstrDiag_Vmptrld);
8593 IEM_MC_BEGIN(2, 0);
8594 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8595 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8597 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8598 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8599 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8600 IEM_MC_END();
8601 return VINF_SUCCESS;
8602}
8603#else
8604FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8605#endif
8606
8607/** Opcode 0x66 0x0f 0xc7 !11/6. */
8608#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8609FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8610{
8611 IEMOP_MNEMONIC(vmclear, "vmclear");
8612 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVInstrDiag_Vmclear);
8613 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVInstrDiag_Vmclear);
8614 IEM_MC_BEGIN(2, 0);
8615 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8616 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8618 IEMOP_HLP_DONE_DECODING();
8619 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8620 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8621 IEM_MC_END();
8622 return VINF_SUCCESS;
8623}
8624#else
8625FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8626#endif
8627
8628/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8629#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8630FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8631{
8632 IEMOP_MNEMONIC(vmxon, "vmxon");
8633 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVInstrDiag_Vmxon);
8634 IEM_MC_BEGIN(2, 0);
8635 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8636 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8638 IEMOP_HLP_DONE_DECODING();
8639 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8640 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8641 IEM_MC_END();
8642 return VINF_SUCCESS;
8643}
8644#else
8645FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8646#endif
8647
8648/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8649#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8650FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8651{
8652 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8653 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVInstrDiag_Vmptrst);
8654 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVInstrDiag_Vmptrst);
8655 IEM_MC_BEGIN(2, 0);
8656 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8657 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8659 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8660 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8661 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8662 IEM_MC_END();
8663 return VINF_SUCCESS;
8664}
8665#else
8666FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8667#endif
8668
8669/** Opcode 0x0f 0xc7 11/7. */
8670FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8671
8672
8673/**
8674 * Group 9 jump table for register variant.
8675 */
8676IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8677{ /* pfx: none, 066h, 0f3h, 0f2h */
8678 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8679 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8680 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8681 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8682 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8683 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8684 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8685 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8686};
8687AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8688
8689
8690/**
8691 * Group 9 jump table for memory variant.
8692 */
8693IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8694{ /* pfx: none, 066h, 0f3h, 0f2h */
8695 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8696 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8697 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8698 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8699 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8700 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8701 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8702 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8703};
8704AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8705
8706
8707/** Opcode 0x0f 0xc7. */
8708FNIEMOP_DEF(iemOp_Grp9)
8709{
8710 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8711 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8712 /* register, register */
8713 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8714 + pVCpu->iem.s.idxPrefix], bRm);
8715 /* memory, register */
8716 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8717 + pVCpu->iem.s.idxPrefix], bRm);
8718}


/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
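
/* Illustrative sketch (not part of the build): for the 32-bit case the
   iemAImpl_bswap_u32 worker reverses the byte order in place, so 0x11223344
   becomes 0x44332211; the 64-bit worker does the same over eight bytes.
   A plain C equivalent of the 32-bit worker's logic:

       static void bswapU32Sketch(uint32_t *pu32)
       {
           uint32_t const u = *pu32;
           *pu32 = (u << 24) | ((u & 0xff00) << 8) | ((u >> 8) & 0xff00) | (u >> 24);
       }

   The 16-bit encoding has undefined results on real CPUs (per the SDM), which
   is presumably why the 16-bit case above deliberately leaves the high dword
   untouched. */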


/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
       REX.X prefix, but REX.B appears to be the correct prefix.  For a
       parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xca. */
FNIEMOP_DEF(iemOp_bswap_rDX_r10)
{
    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcb. */
FNIEMOP_DEF(iemOp_bswap_rBX_r11)
{
    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}

/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
FNIEMOP_STUB(iemOp_psrlw_Vx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
FNIEMOP_STUB(iemOp_paddq_Vx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
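
/* Illustrative note (not part of the build): in the register form above,
   movq copies the low quadword and zero-extends to 128 bits in the
   destination XMM register, e.g. with src.qw0 = 0x1122334455667788:

       dst.qw0 = src.qw0;   // 0x1122334455667788
       dst.qw1 = 0;         // done by IEM_MC_STORE_XREG_U64_ZX_U128

   The memory form stores only the low 64 bits and touches nothing else. */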


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}


/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *         -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
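
/* Illustrative note (not part of the build): movq2dq and movdq2q are
   register-only moves between the MMX and XMM files.  Because they touch MMX
   state, IEM_MC_FPU_TO_MMX_MODE() puts the FPU into MMX mode, which is what
   the ftw=0xff expectations in the @optest lines above assert (all x87
   tag-word entries marked valid).  Roughly, for movdq2q:

       *puMmxDst = uXmmSrc.qw0;   // low 64 bits only
       pFpuCtx->FTW = 0xff;       // enter MMX mode (sketch of the state change)
*/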

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W is first necessary when VLMAX > 256
     *        and opcode modifications are made to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  The REX.W is first necessary when VLMAX > 256
     *        and opcode modifications are made to work with the whole width (not
     *        just 128). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
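
/* Illustrative sketch (not part of the build): pmovmskb gathers the most
   significant bit of each source byte into the low bits of the destination
   GPR.  A plain C version of the 64-bit (MMX) worker's logic:

       static void pmovmskbU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
       {
           uint64_t const uSrc  = *puSrc;
           uint64_t       fMask = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
               fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
           *puDst = fMask;   // bits 8 thru 63 end up zero
       }

   The 128-bit (SSE) worker does the same over 16 bytes, yielding a 16-bit
   mask. */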

/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic ud0fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
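
/* Illustrative note (not part of the build): movntq and movntdq are
   non-temporal stores, i.e. cache-bypass hints; IEM just performs a normal
   store.  The movntdq path uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a
   destination that is not 16-byte aligned faults.  Sketch of that check:

       if (GCPtrEffSrc & 15)
           return iemRaiseGeneralProtectionFault0(pVCpu);   // #GP(0) on misalignment
*/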

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, W */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
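
/* Illustrative sketch (not part of the build): the g_iemAImpl_pxor workers
   referenced above implement a plain bitwise XOR of source into destination,
   e.g. for the 64-bit MMX form:

       static void pxorU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
       {
           *puDst ^= *puSrc;   // pxor mm1, mm1 is thus a common zeroing idiom
       }
*/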

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
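
/* Illustrative sketch (not part of the build): like the Group 9 tables above,
   this map has four columns per opcode byte (none, 066h, 0f3h, 0f2h), which
   the 1024-entry assertion encodes as 256 * 4.  Decoding the second opcode
   byte b thus boils down to something like:

       PFNIEMOP const pfn = g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix];
       return FNIEMOP_CALL(pfn);

   (assuming idxPrefix has been maintained by the prefix decoding loop). */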

/** @} */
