/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 74017 2018-09-01 05:29:02Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

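/*
 * Decoding note: the opcode handlers below share one ModR/M pattern.  A mod
 * field of 3, tested as
 *     (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT),
 * selects the register form; any other mod value is a memory form whose
 * effective address is produced by IEM_MC_CALC_RM_EFF_ADDR.  The 3-bit reg
 * and r/m fields are widened with the REX bits (pVCpu->iem.s.uRexReg and
 * uRexB) so the same code also covers the extended registers in 64-bit mode.
 */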
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}

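/*
 * For the group tables the ModR/M reg field is the instruction selector (the
 * "/digit" used in the comments above); e.g. 0f 00 with reg=2 decodes as
 * lldt.  The table below is indexed by that field, with the undefined /6 and
 * /7 slots routed to iemOp_InvalidWithRM.
 */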

/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVInstrDiag_Vmlaunch);
255 IEMOP_HLP_VMX_INSTR("vmalunch", kVmxVInstrDiag_Vmlaunch);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVInstrDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVInstrDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

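/*
 * Architectural note: lmsw only updates CR0.PE, CR0.MP, CR0.EM and CR0.TS,
 * and while it can set CR0.PE it cannot clear it; leaving protected mode
 * requires a mov to CR0.  The privilege checks and intercepts are left to
 * iemCImpl_lmsw.
 */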

/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};

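/*
 * The memory forms of group 7 dispatch purely on the reg field (table above),
 * while the register forms (mod == 3) encode a second sub-instruction in the
 * r/m field.  E.g. 0f 01 d8 is mod=11b, reg=011b, rm=000b and thus decodes as
 * vmrun in the switch below.
 */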
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

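/*
 * The SSE move handlers from here on follow the IEM microcode template seen
 * above: IEM_MC_BEGIN(cArgs, cLocals) opens the block, the effective address
 * (if any) is calculated before the final decode check, guest SIMD state is
 * lazily loaded via the ACTUALIZE/PREPARE statements, and IEM_MC_ADVANCE_RIP
 * plus IEM_MC_END commit the instruction.  Only the operand widths and the
 * fetch/store statements differ between the variants below.
 */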

/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

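/*
 * Like several other 0f 1x opcodes, 0f 12 yields two mnemonics from one
 * opcode byte: the register form (mod == 3) copies the high qword of the
 * source XMM register to the low qword of the destination (movhlps), while
 * the memory form loads the low qword from memory (movlps).  In both cases
 * the high qword of the destination is left untouched.
 */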

/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

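/*
 * movsldup duplicates the two even dwords of the source, i.e.
 * result = { src[0], src[0], src[2], src[2] } -- matching the @optest above,
 * where 0xdddddddd00000002eeeeeeee00000001 becomes
 * 0x00000002000000020000000100000001.
 */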

/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

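/*
 * movddup duplicates the low qword of the source into both halves of the
 * destination.  The memory form only reads 64 bits (IEM_MC_FETCH_MEM_U64),
 * which is why this is exception type 5 and needs no 16-byte alignment
 * check, unlike movsldup/movshdup which fetch an aligned 128 bits.
 */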

/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/*  Opcode 0xf3 0x0f 0x15 - invalid */
/*  Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

1915/**
1916 * @opdone
1917 * @opmnemonic udf30f16
1918 * @opcode 0x16
1919 * @oppfx 0xf2
1920 * @opunused intel-modrm
1921 * @opcpuid sse
1922 * @optest ->
1923 * @opdone
1924 */
1925
1926
1927/**
1928 * @opcode 0x17
1929 * @opcodesub !11 mr/reg
1930 * @oppfx none
1931 * @opcpuid sse
1932 * @opgroup og_sse_simdfp_datamove
1933 * @opxcpttype 5
1934 * @optest op1=1 op2=2 -> op1=2
1935 * @optest op1=0 op2=-42 -> op1=-42
1936 */
1937FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1938{
1939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1940 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1941 {
1942 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1943
1944 IEM_MC_BEGIN(0, 2);
1945 IEM_MC_LOCAL(uint64_t, uSrc);
1946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1947
1948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1950 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1951 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1952
1953 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1954 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1955
1956 IEM_MC_ADVANCE_RIP();
1957 IEM_MC_END();
1958 return VINF_SUCCESS;
1959 }
1960
1961 /**
1962 * @opdone
1963 * @opmnemonic ud0f17m3
1964 * @opcode 0x17
1965 * @opcodesub 11 mr/reg
1966 * @oppfx none
1967 * @opunused immediate
1968 * @opcpuid sse
1969 * @optest ->
1970 */
1971 return IEMOP_RAISE_INVALID_OPCODE();
1972}
1973
1974
1975/**
1976 * @opcode 0x17
1977 * @opcodesub !11 mr/reg
1978 * @oppfx 0x66
1979 * @opcpuid sse2
1980 * @opgroup og_sse2_pcksclr_datamove
1981 * @opxcpttype 5
1982 * @optest op1=1 op2=2 -> op1=2
1983 * @optest op1=0 op2=-42 -> op1=-42
1984 */
1985FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1986{
1987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1988 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1989 {
1990 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1991
1992 IEM_MC_BEGIN(0, 2);
1993 IEM_MC_LOCAL(uint64_t, uSrc);
1994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1995
1996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1999 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2000
2001 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2002 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2003
2004 IEM_MC_ADVANCE_RIP();
2005 IEM_MC_END();
2006 return VINF_SUCCESS;
2007 }
2008
2009 /**
2010 * @opdone
2011 * @opmnemonic ud660f17m3
2012 * @opcode 0x17
2013 * @opcodesub 11 mr/reg
2014 * @oppfx 0x66
2015 * @opunused immediate
2016 * @opcpuid sse
2017 * @optest ->
2018 */
2019 return IEMOP_RAISE_INVALID_OPCODE();
2020}
2021
2022
2023/**
2024 * @opdone
2025 * @opmnemonic udf30f17
2026 * @opcode 0x17
2027 * @oppfx 0xf3
2028 * @opunused intel-modrm
2029 * @opcpuid sse
2030 * @optest ->
2031 * @opdone
2032 */
2033
2034/**
2035 * @opmnemonic udf20f17
2036 * @opcode 0x17
2037 * @oppfx 0xf2
2038 * @opunused intel-modrm
2039 * @opcpuid sse
2040 * @optest ->
2041 * @opdone
2042 */
2043
2044
2045/** Opcode 0x0f 0x18. */
2046FNIEMOP_DEF(iemOp_prefetch_Grp16)
2047{
2048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2049 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2050 {
2051 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2052 {
2053 case 4: /* Aliased to /0 for the time being according to AMD. */
2054 case 5: /* Aliased to /0 for the time being according to AMD. */
2055 case 6: /* Aliased to /0 for the time being according to AMD. */
2056 case 7: /* Aliased to /0 for the time being according to AMD. */
2057 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2058 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2059 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2060 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2062 }
2063
2064 IEM_MC_BEGIN(0, 1);
2065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 /* Currently a NOP. */
2069 NOREF(GCPtrEffSrc);
2070 IEM_MC_ADVANCE_RIP();
2071 IEM_MC_END();
2072 return VINF_SUCCESS;
2073 }
2074
2075 return IEMOP_RAISE_INVALID_OPCODE();
2076}
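
/*
 * Note: the prefetch hints are advisory and do not fault on the referenced
 * memory, so decoding the effective address and then doing nothing, as the
 * function above does, should be a faithful emulation.
 */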
2077
2078
2079/** Opcode 0x0f 0x19..0x1f. */
2080FNIEMOP_DEF(iemOp_nop_Ev)
2081{
2082 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2085 {
2086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2087 IEM_MC_BEGIN(0, 0);
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 else
2092 {
2093 IEM_MC_BEGIN(0, 1);
2094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2097 /* Currently a NOP. */
2098 NOREF(GCPtrEffSrc);
2099 IEM_MC_ADVANCE_RIP();
2100 IEM_MC_END();
2101 }
2102 return VINF_SUCCESS;
2103}
2104
2105
2106/** Opcode 0x0f 0x20. */
2107FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2108{
2109 /* mod is ignored, as are operand size overrides. */
2110 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2111 IEMOP_HLP_MIN_386();
2112 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2113 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2114 else
2115 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2116
2117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2118 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2119 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2120 {
2121 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2122 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2123 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2124 iCrReg |= 8;
2125 }
2126 switch (iCrReg)
2127 {
2128 case 0: case 2: case 3: case 4: case 8:
2129 break;
2130 default:
2131 return IEMOP_RAISE_INVALID_OPCODE();
2132 }
2133 IEMOP_HLP_DONE_DECODING();
2134
2135 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2136}
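
/*
 * Example of the lock prefix handling above: on CPUs reporting
 * fMovCr8In32Bit (the AMD alternative encoding),
 *      lock mov eax, cr0
 * is decoded as 'mov eax, cr8', which is why the lock prefix sets bit 3 of
 * iCrReg instead of raising #UD.
 */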
2137
2138
2139/** Opcode 0x0f 0x21. */
2140FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2141{
2142 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2143 IEMOP_HLP_MIN_386();
2144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2146 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2147 return IEMOP_RAISE_INVALID_OPCODE();
2148 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2149 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2150 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2151}
2152
2153
2154/** Opcode 0x0f 0x22. */
2155FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2156{
2157 /* mod is ignored, as are operand size overrides. */
2158 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2159 IEMOP_HLP_MIN_386();
2160 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2161 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2162 else
2163 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2164
2165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2166 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2167 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2168 {
2169 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2170 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2171 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2172 iCrReg |= 8;
2173 }
2174 switch (iCrReg)
2175 {
2176 case 0: case 2: case 3: case 4: case 8:
2177 break;
2178 default:
2179 return IEMOP_RAISE_INVALID_OPCODE();
2180 }
2181 IEMOP_HLP_DONE_DECODING();
2182
2183 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2184}
2185
2186
2187/** Opcode 0x0f 0x23. */
2188FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2189{
2190 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2191 IEMOP_HLP_MIN_386();
2192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2194 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2197 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2198 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2199}
2200
2201
2202/** Opcode 0x0f 0x24. */
2203FNIEMOP_DEF(iemOp_mov_Rd_Td)
2204{
2205 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2206 /** @todo works on 386 and 486. */
2207 /* The RM byte is not considered, see testcase. */
2208 return IEMOP_RAISE_INVALID_OPCODE();
2209}
2210
2211
2212/** Opcode 0x0f 0x26. */
2213FNIEMOP_DEF(iemOp_mov_Td_Rd)
2214{
2215 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2216 /** @todo works on 386 and 486. */
2217 /* The RM byte is not considered, see testcase. */
2218 return IEMOP_RAISE_INVALID_OPCODE();
2219}
2220
2221
2222/**
2223 * @opcode 0x28
2224 * @oppfx none
2225 * @opcpuid sse
2226 * @opgroup og_sse_simdfp_datamove
2227 * @opxcpttype 1
2228 * @optest op1=1 op2=2 -> op1=2
2229 * @optest op1=0 op2=-42 -> op1=-42
2230 */
2231FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2232{
2233 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2235 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2236 {
2237 /*
2238 * Register, register.
2239 */
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_BEGIN(0, 0);
2242 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2243 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2244 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2245 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2246 IEM_MC_ADVANCE_RIP();
2247 IEM_MC_END();
2248 }
2249 else
2250 {
2251 /*
2252 * Register, memory.
2253 */
2254 IEM_MC_BEGIN(0, 2);
2255 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2257
2258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2260 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2261 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2262
2263 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2264 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2265
2266 IEM_MC_ADVANCE_RIP();
2267 IEM_MC_END();
2268 }
2269 return VINF_SUCCESS;
2270}
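
/*
 * Alignment note: the memory form of movaps requires a 16-byte aligned
 * operand; the _ALIGN_SSE fetch above raises #GP(0) on a misaligned
 * GCPtrEffSrc, while the register form has no alignment requirement.
 */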
2271
2272/**
2273 * @opcode 0x28
2274 * @oppfx 66
2275 * @opcpuid sse2
2276 * @opgroup og_sse2_pcksclr_datamove
2277 * @opxcpttype 1
2278 * @optest op1=1 op2=2 -> op1=2
2279 * @optest op1=0 op2=-42 -> op1=-42
2280 */
2281FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2282{
2283 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2291 IEM_MC_BEGIN(0, 0);
2292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2294 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2295 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2296 IEM_MC_ADVANCE_RIP();
2297 IEM_MC_END();
2298 }
2299 else
2300 {
2301 /*
2302 * Register, memory.
2303 */
2304 IEM_MC_BEGIN(0, 2);
2305 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2307
2308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312
2313 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2314 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2315
2316 IEM_MC_ADVANCE_RIP();
2317 IEM_MC_END();
2318 }
2319 return VINF_SUCCESS;
2320}
2321
2322/* Opcode 0xf3 0x0f 0x28 - invalid */
2323/* Opcode 0xf2 0x0f 0x28 - invalid */
2324
2325/**
2326 * @opcode 0x29
2327 * @oppfx none
2328 * @opcpuid sse
2329 * @opgroup og_sse_simdfp_datamove
2330 * @opxcpttype 1
2331 * @optest op1=1 op2=2 -> op1=2
2332 * @optest op1=0 op2=-42 -> op1=-42
2333 */
2334FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2335{
2336 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2338 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2339 {
2340 /*
2341 * Register, register.
2342 */
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_BEGIN(0, 0);
2345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2347 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2348 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 else
2353 {
2354 /*
2355 * Memory, register.
2356 */
2357 IEM_MC_BEGIN(0, 2);
2358 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2360
2361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2365
2366 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2368
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 return VINF_SUCCESS;
2373}
2374
2375/**
2376 * @opcode 0x29
2377 * @oppfx 66
2378 * @opcpuid sse2
2379 * @opgroup og_sse2_pcksclr_datamove
2380 * @opxcpttype 1
2381 * @optest op1=1 op2=2 -> op1=2
2382 * @optest op1=0 op2=-42 -> op1=-42
2383 */
2384FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2385{
2386 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2389 {
2390 /*
2391 * Register, register.
2392 */
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2394 IEM_MC_BEGIN(0, 0);
2395 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2396 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2397 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2398 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2399 IEM_MC_ADVANCE_RIP();
2400 IEM_MC_END();
2401 }
2402 else
2403 {
2404 /*
2405 * Memory, register.
2406 */
2407 IEM_MC_BEGIN(0, 2);
2408 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2410
2411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2413 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2415
2416 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2418
2419 IEM_MC_ADVANCE_RIP();
2420 IEM_MC_END();
2421 }
2422 return VINF_SUCCESS;
2423}
2424
2425/* Opcode 0xf3 0x0f 0x29 - invalid */
2426/* Opcode 0xf2 0x0f 0x29 - invalid */
2427
2428
2429/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2430FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2431/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2432FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2433/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2434FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2435/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2436FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2437
2438
2439/**
2440 * @opcode 0x2b
2441 * @opcodesub !11 mr/reg
2442 * @oppfx none
2443 * @opcpuid sse
2444 * @opgroup og_sse1_cachect
2445 * @opxcpttype 1
2446 * @optest op1=1 op2=2 -> op1=2
2447 * @optest op1=0 op2=-42 -> op1=-42
2448 */
2449FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2450{
2451 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2453 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2454 {
2455 /*
2456 * Memory, register.
2457 */
2458 IEM_MC_BEGIN(0, 2);
2459 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2461
2462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2466
2467 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2468 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2469
2470 IEM_MC_ADVANCE_RIP();
2471 IEM_MC_END();
2472 }
2473 /* The register, register encoding is invalid. */
2474 else
2475 return IEMOP_RAISE_INVALID_OPCODE();
2476 return VINF_SUCCESS;
2477}
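
/*
 * The non-temporal (cache bypassing) aspect of movntps is only a performance
 * hint, so emulating it as an ordinary aligned 128-bit store, as above, is
 * believed to be architecturally equivalent.
 */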
2478
2479/**
2480 * @opcode 0x2b
2481 * @opcodesub !11 mr/reg
2482 * @oppfx 0x66
2483 * @opcpuid sse2
2484 * @opgroup og_sse2_cachect
2485 * @opxcpttype 1
2486 * @optest op1=1 op2=2 -> op1=2
2487 * @optest op1=0 op2=-42 -> op1=-42
2488 */
2489FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2490{
2491 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2493 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2494 {
2495 /*
2496 * Memory, register.
2497 */
2498 IEM_MC_BEGIN(0, 2);
2499 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2501
2502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2506
2507 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2508 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2509
2510 IEM_MC_ADVANCE_RIP();
2511 IEM_MC_END();
2512 }
2513 /* The register, register encoding is invalid. */
2514 else
2515 return IEMOP_RAISE_INVALID_OPCODE();
2516 return VINF_SUCCESS;
2517}
2518/* Opcode 0xf3 0x0f 0x2b - invalid */
2519/* Opcode 0xf2 0x0f 0x2b - invalid */
2520
2521
2522/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2523FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2524/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2525FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2526/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2527FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2528/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2529FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2530
2531/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2532FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2533/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2534FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2535/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2536FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2537/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2538FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2539
2540/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2541FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2542/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2543FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2544/* Opcode 0xf3 0x0f 0x2e - invalid */
2545/* Opcode 0xf2 0x0f 0x2e - invalid */
2546
2547/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2548FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2549/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2550FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2551/* Opcode 0xf3 0x0f 0x2f - invalid */
2552/* Opcode 0xf2 0x0f 0x2f - invalid */
2553
2554/** Opcode 0x0f 0x30. */
2555FNIEMOP_DEF(iemOp_wrmsr)
2556{
2557 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2560}
2561
2562
2563/** Opcode 0x0f 0x31. */
2564FNIEMOP_DEF(iemOp_rdtsc)
2565{
2566 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2568 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2569}
2570
2571
2572/** Opcode 0x0f 0x32. */
2573FNIEMOP_DEF(iemOp_rdmsr)
2574{
2575 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2577 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2578}
2579
2580
2581/** Opcode 0x0f 0x33. */
2582FNIEMOP_DEF(iemOp_rdpmc)
2583{
2584 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2587}
2588
2589
2590/** Opcode 0x0f 0x34. */
2591FNIEMOP_STUB(iemOp_sysenter);
2592/** Opcode 0x0f 0x35. */
2593FNIEMOP_STUB(iemOp_sysexit);
2594/** Opcode 0x0f 0x37. */
2595FNIEMOP_STUB(iemOp_getsec);
2596
2597
2598/** Opcode 0x0f 0x38. */
2599FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2600{
2601#ifdef IEM_WITH_THREE_0F_38
2602 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2603 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2604#else
2605 IEMOP_BITCH_ABOUT_STUB();
2606 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2607#endif
2608}
2609
2610
2611/** Opcode 0x0f 0x3a. */
2612FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2613{
2614#ifdef IEM_WITH_THREE_0F_3A
2615 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2616 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2617#else
2618 IEMOP_BITCH_ABOUT_STUB();
2619 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2620#endif
2621}
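
/*
 * The table lookups above use four entries per opcode byte: index 0 is the
 * no-prefix form, and indexes 1 thru 3 are believed to correspond to the
 * 0x66, 0xf3 and 0xf2 prefixed forms (pVCpu->iem.s.idxPrefix).
 */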
2622
2623
2624/**
2625 * Implements a conditional move.
2626 *
2627 * Wish there was an obvious way to do this where we could share and reduce
2628 * code bloat.
2629 *
2630 * @param a_Cnd The conditional "microcode" operation.
2631 */
2632#define CMOV_X(a_Cnd) \
2633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2635 { \
2636 switch (pVCpu->iem.s.enmEffOpSize) \
2637 { \
2638 case IEMMODE_16BIT: \
2639 IEM_MC_BEGIN(0, 1); \
2640 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2641 a_Cnd { \
2642 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2643 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2644 } IEM_MC_ENDIF(); \
2645 IEM_MC_ADVANCE_RIP(); \
2646 IEM_MC_END(); \
2647 return VINF_SUCCESS; \
2648 \
2649 case IEMMODE_32BIT: \
2650 IEM_MC_BEGIN(0, 1); \
2651 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2652 a_Cnd { \
2653 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2654 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2655 } IEM_MC_ELSE() { \
2656 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2657 } IEM_MC_ENDIF(); \
2658 IEM_MC_ADVANCE_RIP(); \
2659 IEM_MC_END(); \
2660 return VINF_SUCCESS; \
2661 \
2662 case IEMMODE_64BIT: \
2663 IEM_MC_BEGIN(0, 1); \
2664 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2665 a_Cnd { \
2666 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2667 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2668 } IEM_MC_ENDIF(); \
2669 IEM_MC_ADVANCE_RIP(); \
2670 IEM_MC_END(); \
2671 return VINF_SUCCESS; \
2672 \
2673 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2674 } \
2675 } \
2676 else \
2677 { \
2678 switch (pVCpu->iem.s.enmEffOpSize) \
2679 { \
2680 case IEMMODE_16BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2688 } IEM_MC_ENDIF(); \
2689 IEM_MC_ADVANCE_RIP(); \
2690 IEM_MC_END(); \
2691 return VINF_SUCCESS; \
2692 \
2693 case IEMMODE_32BIT: \
2694 IEM_MC_BEGIN(0, 2); \
2695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2696 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2698 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2699 a_Cnd { \
2700 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2701 } IEM_MC_ELSE() { \
2702 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2703 } IEM_MC_ENDIF(); \
2704 IEM_MC_ADVANCE_RIP(); \
2705 IEM_MC_END(); \
2706 return VINF_SUCCESS; \
2707 \
2708 case IEMMODE_64BIT: \
2709 IEM_MC_BEGIN(0, 2); \
2710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2711 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2713 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2714 a_Cnd { \
2715 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2716 } IEM_MC_ENDIF(); \
2717 IEM_MC_ADVANCE_RIP(); \
2718 IEM_MC_END(); \
2719 return VINF_SUCCESS; \
2720 \
2721 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2722 } \
2723 } do {} while (0)
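
/*
 * Rough expansion sketch of CMOV_X for a 32-bit register operand:
 *      if (condition)  dst = src;
 *      else            dst[63:32] = 0;
 * Note the else branch: in 64-bit mode even an untaken 32-bit cmov zeroes
 * the upper half of the destination, hence IEM_MC_CLEAR_HIGH_GREG_U64 in
 * the 32-bit cases only.
 */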
2724
2725
2726
2727/** Opcode 0x0f 0x40. */
2728FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2729{
2730 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2731 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2732}
2733
2734
2735/** Opcode 0x0f 0x41. */
2736FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2737{
2738 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2739 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2740}
2741
2742
2743/** Opcode 0x0f 0x42. */
2744FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2745{
2746 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2747 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2748}
2749
2750
2751/** Opcode 0x0f 0x43. */
2752FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2753{
2754 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2755 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2756}
2757
2758
2759/** Opcode 0x0f 0x44. */
2760FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2761{
2762 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2763 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2764}
2765
2766
2767/** Opcode 0x0f 0x45. */
2768FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2769{
2770 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2771 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2772}
2773
2774
2775/** Opcode 0x0f 0x46. */
2776FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2777{
2778 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2779 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2780}
2781
2782
2783/** Opcode 0x0f 0x47. */
2784FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2785{
2786 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2787 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2788}
2789
2790
2791/** Opcode 0x0f 0x48. */
2792FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2793{
2794 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2795 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2796}
2797
2798
2799/** Opcode 0x0f 0x49. */
2800FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2801{
2802 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2803 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2804}
2805
2806
2807/** Opcode 0x0f 0x4a. */
2808FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2809{
2810 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2811 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2812}
2813
2814
2815/** Opcode 0x0f 0x4b. */
2816FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2817{
2818 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2819 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2820}
2821
2822
2823/** Opcode 0x0f 0x4c. */
2824FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2825{
2826 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2827 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2828}
2829
2830
2831/** Opcode 0x0f 0x4d. */
2832FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2833{
2834 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2835 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2836}
2837
2838
2839/** Opcode 0x0f 0x4e. */
2840FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2841{
2842 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2843 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2844}
2845
2846
2847/** Opcode 0x0f 0x4f. */
2848FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2849{
2850 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2851 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2852}
2853
2854#undef CMOV_X
2855
2856/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2857FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2858/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2859FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2860/* Opcode 0xf3 0x0f 0x50 - invalid */
2861/* Opcode 0xf2 0x0f 0x50 - invalid */
2862
2863/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2864FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2865/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2866FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2867/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2868FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2869/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2870FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2871
2872/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2873FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2874/* Opcode 0x66 0x0f 0x52 - invalid */
2875/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2876FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2877/* Opcode 0xf2 0x0f 0x52 - invalid */
2878
2879/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2880FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2881/* Opcode 0x66 0x0f 0x53 - invalid */
2882/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2883FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2884/* Opcode 0xf2 0x0f 0x53 - invalid */
2885
2886/** Opcode 0x0f 0x54 - andps Vps, Wps */
2887FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2888/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2889FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2890/* Opcode 0xf3 0x0f 0x54 - invalid */
2891/* Opcode 0xf2 0x0f 0x54 - invalid */
2892
2893/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2894FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2895/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2896FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2897/* Opcode 0xf3 0x0f 0x55 - invalid */
2898/* Opcode 0xf2 0x0f 0x55 - invalid */
2899
2900/** Opcode 0x0f 0x56 - orps Vps, Wps */
2901FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2902/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2903FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2904/* Opcode 0xf3 0x0f 0x56 - invalid */
2905/* Opcode 0xf2 0x0f 0x56 - invalid */
2906
2907/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2908FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2909/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2910FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2911/* Opcode 0xf3 0x0f 0x57 - invalid */
2912/* Opcode 0xf2 0x0f 0x57 - invalid */
2913
2914/** Opcode 0x0f 0x58 - addps Vps, Wps */
2915FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2916/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2917FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2918/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2919FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2920/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2921FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2922
2923/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2924FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2925/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2926FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2927/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2928FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2929/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2930FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2931
2932/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2933FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2934/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2935FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2936/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2937FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2938/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2939FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2940
2941/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2942FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2943/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2944FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2945/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2946FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2947/* Opcode 0xf2 0x0f 0x5b - invalid */
2948
2949/** Opcode 0x0f 0x5c - subps Vps, Wps */
2950FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2951/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2952FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2953/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2954FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2955/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2956FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2957
2958/** Opcode 0x0f 0x5d - minps Vps, Wps */
2959FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2960/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2961FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2962/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2963FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2964/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2965FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2966
2967/** Opcode 0x0f 0x5e - divps Vps, Wps */
2968FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2969/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2970FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2971/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2972FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2973/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2974FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2975
2976/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2977FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2978/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2979FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2980/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2981FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2982/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2983FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2984
2985/**
2986 * Common worker for SSE2 instructions on the forms:
2987 * pxxxx xmm1, xmm2/mem128
2988 *
2989 * The 2nd operand is the first half of a register, which in the memory case
2990 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2991 * 128-bit memory access for SSE.
2992 *
2993 * Exceptions type 4.
2994 */
2995FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2996{
2997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2998 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2999 {
3000 /*
3001 * Register, register.
3002 */
3003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3004 IEM_MC_BEGIN(2, 0);
3005 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3006 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3008 IEM_MC_PREPARE_SSE_USAGE();
3009 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3010 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3011 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3012 IEM_MC_ADVANCE_RIP();
3013 IEM_MC_END();
3014 }
3015 else
3016 {
3017 /*
3018 * Register, memory.
3019 */
3020 IEM_MC_BEGIN(2, 2);
3021 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3022 IEM_MC_LOCAL(uint64_t, uSrc);
3023 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3025
3026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3028 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3029 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3030
3031 IEM_MC_PREPARE_SSE_USAGE();
3032 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3033 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3034
3035 IEM_MC_ADVANCE_RIP();
3036 IEM_MC_END();
3037 }
3038 return VINF_SUCCESS;
3039}
3040
3041
3042/**
3043 * Common worker for MMX instructions on the forms:
3044 * pxxxx mm1, mm2/mem32
3045 *
3046 * The 2nd operand is the first half of a register, which in the memory case
3047 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
3048 * 128-bit memory access for SSE.
3049 *
3050 * Exceptions type 4.
3051 */
3052FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3053{
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 if (!pImpl->pfnU64)
3056 return IEMOP_RAISE_INVALID_OPCODE();
3057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3063 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_BEGIN(2, 0);
3066 IEM_MC_ARG(uint64_t *, pDst, 0);
3067 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3068 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3069 IEM_MC_PREPARE_FPU_USAGE();
3070 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3071 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3072 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3073 IEM_MC_ADVANCE_RIP();
3074 IEM_MC_END();
3075 }
3076 else
3077 {
3078 /*
3079 * Register, memory.
3080 */
3081 IEM_MC_BEGIN(2, 2);
3082 IEM_MC_ARG(uint64_t *, pDst, 0);
3083 IEM_MC_LOCAL(uint32_t, uSrc);
3084 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3086
3087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3089 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3090 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3091
3092 IEM_MC_PREPARE_FPU_USAGE();
3093 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3094 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3095
3096 IEM_MC_ADVANCE_RIP();
3097 IEM_MC_END();
3098 }
3099 return VINF_SUCCESS;
3100}
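
/*
 * Byte-lane sketch for the low-low workers above, using punpcklbw (with
 * dst = [d7..d0] and src = [s7..s0] as bytes, high to low):
 *      punpcklbw dst, src  ->  dst = [s3 d3 s2 d2 s1 d1 s0 d0]
 * Only the low halves of both operands are consumed, which is why the MMX
 * memory form fetches just 32 bits.
 */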
3101
3102
3103/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3104FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3105{
3106 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3107 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3108}
3109
3110/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3111FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3112{
3113 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3114 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3115}
3116
3117/* Opcode 0xf3 0x0f 0x60 - invalid */
3118
3119
3120/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3121FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3122{
3123 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
3124 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3125}
3126
3127/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3128FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3129{
3130 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3131 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3132}
3133
3134/* Opcode 0xf3 0x0f 0x61 - invalid */
3135
3136
3137/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3138FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3139{
3140 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3141 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3142}
3143
3144/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3145FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3146{
3147 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3148 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3149}
3150
3151/* Opcode 0xf3 0x0f 0x62 - invalid */
3152
3153
3154
3155/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3156FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3157/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3158FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3159/* Opcode 0xf3 0x0f 0x63 - invalid */
3160
3161/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3162FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3163/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3164FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3165/* Opcode 0xf3 0x0f 0x64 - invalid */
3166
3167/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3168FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3169/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3170FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3171/* Opcode 0xf3 0x0f 0x65 - invalid */
3172
3173/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3174FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3175/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3176FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3177/* Opcode 0xf3 0x0f 0x66 - invalid */
3178
3179/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3180FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3181/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3182FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3183/* Opcode 0xf3 0x0f 0x67 - invalid */
3184
3185
3186/**
3187 * Common worker for MMX instructions on the form:
3188 * pxxxx mm1, mm2/mem64
3189 *
3190 * The 2nd operand is the second half of a register, which in the memory case
3191 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3192 * where it may read the full 128 bits or only the upper 64 bits.
3193 *
3194 * Exceptions type 4.
3195 */
3196FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3197{
3198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3199 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3201 {
3202 /*
3203 * Register, register.
3204 */
3205 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3206 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3208 IEM_MC_BEGIN(2, 0);
3209 IEM_MC_ARG(uint64_t *, pDst, 0);
3210 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3211 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3212 IEM_MC_PREPARE_FPU_USAGE();
3213 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3214 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3215 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3216 IEM_MC_ADVANCE_RIP();
3217 IEM_MC_END();
3218 }
3219 else
3220 {
3221 /*
3222 * Register, memory.
3223 */
3224 IEM_MC_BEGIN(2, 2);
3225 IEM_MC_ARG(uint64_t *, pDst, 0);
3226 IEM_MC_LOCAL(uint64_t, uSrc);
3227 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3229
3230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3232 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3233 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3234
3235 IEM_MC_PREPARE_FPU_USAGE();
3236 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3237 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3238
3239 IEM_MC_ADVANCE_RIP();
3240 IEM_MC_END();
3241 }
3242 return VINF_SUCCESS;
3243}
3244
3245
3246/**
3247 * Common worker for SSE2 instructions on the form:
3248 * pxxxx xmm1, xmm2/mem128
3249 *
3250 * The 2nd operand is the second half of a register, which in the memory case
3251 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3252 * where it may read the full 128 bits or only the upper 64 bits.
3253 *
3254 * Exceptions type 4.
3255 */
3256FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3257{
3258 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3259 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3260 {
3261 /*
3262 * Register, register.
3263 */
3264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3265 IEM_MC_BEGIN(2, 0);
3266 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3267 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3268 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3269 IEM_MC_PREPARE_SSE_USAGE();
3270 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3271 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3272 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3273 IEM_MC_ADVANCE_RIP();
3274 IEM_MC_END();
3275 }
3276 else
3277 {
3278 /*
3279 * Register, memory.
3280 */
3281 IEM_MC_BEGIN(2, 2);
3282 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3283 IEM_MC_LOCAL(RTUINT128U, uSrc);
3284 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3286
3287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3290 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3291
3292 IEM_MC_PREPARE_SSE_USAGE();
3293 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3294 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3295
3296 IEM_MC_ADVANCE_RIP();
3297 IEM_MC_END();
3298 }
3299 return VINF_SUCCESS;
3300}
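
/*
 * Byte-lane sketch for the high-high workers above, using punpckhbw (with
 * dst = [d7..d0] and src = [s7..s0] as bytes, high to low):
 *      punpckhbw dst, src  ->  dst = [s7 d7 s6 d6 s5 d5 s4 d4]
 * Only the high halves of both operands matter, even though the SSE memory
 * form may fetch the full 128 bits.
 */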
3301
3302
3303/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3304FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3305{
3306 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3307 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3308}
3309
3310/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3311FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3312{
3313 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3314 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3315}
3316/* Opcode 0xf3 0x0f 0x68 - invalid */
3317
3318
3319/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3320FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3321{
3322 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3323 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3324}
3325
3326/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3327FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3328{
3329 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3330 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3332}
3333/* Opcode 0xf3 0x0f 0x69 - invalid */
3334
3335
3336/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3337FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3338{
3339 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3340 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3341}
3342
3343/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3344FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3345{
3346 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3347 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3348}
3349/* Opcode 0xf3 0x0f 0x6a - invalid */
3350
3351
3352/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3353FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3354/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3355FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3356/* Opcode 0xf3 0x0f 0x6b - invalid */
3357
3358
3359/* Opcode 0x0f 0x6c - invalid */
3360
3361/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3362FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3363{
3364 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3365 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3366}
3367
3368/* Opcode 0xf3 0x0f 0x6c - invalid */
3369/* Opcode 0xf2 0x0f 0x6c - invalid */
3370
3371
3372/* Opcode 0x0f 0x6d - invalid */
3373
3374/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3375FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3376{
3377 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3378 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3379}
3380
3381/* Opcode 0xf3 0x0f 0x6d - invalid */
3382
3383
3384FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3385{
3386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3387 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3388 {
3389 /**
3390 * @opcode 0x6e
3391 * @opcodesub rex.w=1
3392 * @oppfx none
3393 * @opcpuid mmx
3394 * @opgroup og_mmx_datamove
3395 * @opxcpttype 5
3396 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3397 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3398 */
3399 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3401 {
3402 /* MMX, greg64 */
3403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3404 IEM_MC_BEGIN(0, 1);
3405 IEM_MC_LOCAL(uint64_t, u64Tmp);
3406
3407 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3408 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3409
3410 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3411 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3412 IEM_MC_FPU_TO_MMX_MODE();
3413
3414 IEM_MC_ADVANCE_RIP();
3415 IEM_MC_END();
3416 }
3417 else
3418 {
3419 /* MMX, [mem64] */
3420 IEM_MC_BEGIN(0, 2);
3421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3422 IEM_MC_LOCAL(uint64_t, u64Tmp);
3423
3424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3426 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3427 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3428
3429 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3430 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3431 IEM_MC_FPU_TO_MMX_MODE();
3432
3433 IEM_MC_ADVANCE_RIP();
3434 IEM_MC_END();
3435 }
3436 }
3437 else
3438 {
3439 /**
3440 * @opdone
3441 * @opcode 0x6e
3442 * @opcodesub rex.w=0
3443 * @oppfx none
3444 * @opcpuid mmx
3445 * @opgroup og_mmx_datamove
3446 * @opxcpttype 5
3447 * @opfunction iemOp_movd_q_Pd_Ey
3448 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3449 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3450 */
3451 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3452 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3453 {
3454 /* MMX, greg */
3455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3456 IEM_MC_BEGIN(0, 1);
3457 IEM_MC_LOCAL(uint64_t, u64Tmp);
3458
3459 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3460 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3461
3462 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3463 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3464 IEM_MC_FPU_TO_MMX_MODE();
3465
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 }
3469 else
3470 {
3471 /* MMX, [mem] */
3472 IEM_MC_BEGIN(0, 2);
3473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3474 IEM_MC_LOCAL(uint32_t, u32Tmp);
3475
3476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480
3481 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3482 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3483 IEM_MC_FPU_TO_MMX_MODE();
3484
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 }
3488 }
3489 return VINF_SUCCESS;
3490}
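
/*
 * Encoding recap for the function above (0x0f 0x6e, no prefix):
 *      0f 6e /r            movd mm, r/m32      (zero-extended to 64 bits)
 *      rex.w 0f 6e /r      movq mm, r/m64
 * Both forms also switch the FPU into MMX mode, which is what the ftw=0xff
 * assertions in the @optest lines above check for.
 */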
3491
3492FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3493{
3494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3495 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3496 {
3497 /**
3498 * @opcode 0x6e
3499 * @opcodesub rex.w=1
3500 * @oppfx 0x66
3501 * @opcpuid sse2
3502 * @opgroup og_sse2_simdint_datamove
3503 * @opxcpttype 5
3504 * @optest 64-bit / op1=1 op2=2 -> op1=2
3505 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3506 */
3507 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3509 {
3510 /* XMM, greg64 */
3511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3512 IEM_MC_BEGIN(0, 1);
3513 IEM_MC_LOCAL(uint64_t, u64Tmp);
3514
3515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3517
3518 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3519 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3520
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 else
3525 {
3526 /* XMM, [mem64] */
3527 IEM_MC_BEGIN(0, 2);
3528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3529 IEM_MC_LOCAL(uint64_t, u64Tmp);
3530
3531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3535
3536 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3537 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3538
3539 IEM_MC_ADVANCE_RIP();
3540 IEM_MC_END();
3541 }
3542 }
3543 else
3544 {
3545 /**
3546 * @opdone
3547 * @opcode 0x6e
3548 * @opcodesub rex.w=0
3549 * @oppfx 0x66
3550 * @opcpuid sse2
3551 * @opgroup og_sse2_simdint_datamove
3552 * @opxcpttype 5
3553 * @opfunction iemOp_movd_q_Vy_Ey
3554 * @optest op1=1 op2=2 -> op1=2
3555 * @optest op1=0 op2=-42 -> op1=-42
3556 */
3557 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3559 {
3560 /* XMM, greg32 */
3561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3562 IEM_MC_BEGIN(0, 1);
3563 IEM_MC_LOCAL(uint32_t, u32Tmp);
3564
3565 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3566 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3567
3568 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3570
3571 IEM_MC_ADVANCE_RIP();
3572 IEM_MC_END();
3573 }
3574 else
3575 {
3576 /* XMM, [mem32] */
3577 IEM_MC_BEGIN(0, 2);
3578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3579 IEM_MC_LOCAL(uint32_t, u32Tmp);
3580
3581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3583 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3584 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3585
3586 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3587 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3588
3589 IEM_MC_ADVANCE_RIP();
3590 IEM_MC_END();
3591 }
3592 }
3593 return VINF_SUCCESS;
3594}
3595
3596/* Opcode 0xf3 0x0f 0x6e - invalid */
3597
3598
3599/**
3600 * @opcode 0x6f
3601 * @oppfx none
3602 * @opcpuid mmx
3603 * @opgroup og_mmx_datamove
3604 * @opxcpttype 5
3605 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3606 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3607 */
3608FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3609{
3610 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3612 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3613 {
3614 /*
3615 * Register, register.
3616 */
3617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3618 IEM_MC_BEGIN(0, 1);
3619 IEM_MC_LOCAL(uint64_t, u64Tmp);
3620
3621 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3622 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3623
3624 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3625 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3626 IEM_MC_FPU_TO_MMX_MODE();
3627
3628 IEM_MC_ADVANCE_RIP();
3629 IEM_MC_END();
3630 }
3631 else
3632 {
3633 /*
3634 * Register, memory.
3635 */
3636 IEM_MC_BEGIN(0, 2);
3637 IEM_MC_LOCAL(uint64_t, u64Tmp);
3638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3639
3640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3642 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3643 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3644
3645 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3646 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3647 IEM_MC_FPU_TO_MMX_MODE();
3648
3649 IEM_MC_ADVANCE_RIP();
3650 IEM_MC_END();
3651 }
3652 return VINF_SUCCESS;
3653}
3654
3655/**
3656 * @opcode 0x6f
3657 * @oppfx 0x66
3658 * @opcpuid sse2
3659 * @opgroup og_sse2_simdint_datamove
3660 * @opxcpttype 1
3661 * @optest op1=1 op2=2 -> op1=2
3662 * @optest op1=0 op2=-42 -> op1=-42
3663 */
3664FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3665{
3666 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3669 {
3670 /*
3671 * Register, register.
3672 */
3673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674 IEM_MC_BEGIN(0, 0);
3675
3676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3678
3679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3681 IEM_MC_ADVANCE_RIP();
3682 IEM_MC_END();
3683 }
3684 else
3685 {
3686 /*
3687 * Register, memory.
3688 */
3689 IEM_MC_BEGIN(0, 2);
3690 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3692
3693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3697
3698 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3699 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3700
3701 IEM_MC_ADVANCE_RIP();
3702 IEM_MC_END();
3703 }
3704 return VINF_SUCCESS;
3705}
3706
3707/**
3708 * @opcode 0x6f
3709 * @oppfx 0xf3
3710 * @opcpuid sse2
3711 * @opgroup og_sse2_simdint_datamove
3712 * @opxcpttype 4UA
3713 * @optest op1=1 op2=2 -> op1=2
3714 * @optest op1=0 op2=-42 -> op1=-42
3715 */
3716FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3717{
3718 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3721 {
3722 /*
3723 * Register, register.
3724 */
3725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3726 IEM_MC_BEGIN(0, 0);
3727 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3728 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3729 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3730 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3731 IEM_MC_ADVANCE_RIP();
3732 IEM_MC_END();
3733 }
3734 else
3735 {
3736 /*
3737 * Register, memory.
3738 */
3739 IEM_MC_BEGIN(0, 2);
3740 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3742
3743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3745 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3746 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
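        /* Note: no 16-byte alignment check here, unlike movdqa above; movdqu
           tolerates misaligned operands, hence plain IEM_MC_FETCH_MEM_U128. */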
3747 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3748 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3749
3750 IEM_MC_ADVANCE_RIP();
3751 IEM_MC_END();
3752 }
3753 return VINF_SUCCESS;
3754}
3755
3756
3757/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3758FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3759{
3760 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
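    /* The imm8 holds four 2-bit selectors; destination word i is copied from
       source word (imm8 >> (i * 2)) & 3. Illustration only, with uDst/uSrc/bImm
       as stand-in names:
            for (unsigned i = 0; i < 4; i++)
                uDst.au16[i] = uSrc.au16[(bImm >> (i * 2)) & 3]; */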
3761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3763 {
3764 /*
3765 * Register, register.
3766 */
3767 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769
3770 IEM_MC_BEGIN(3, 0);
3771 IEM_MC_ARG(uint64_t *, pDst, 0);
3772 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3773 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3774 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3775 IEM_MC_PREPARE_FPU_USAGE();
3776 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3777 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3778 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3779 IEM_MC_ADVANCE_RIP();
3780 IEM_MC_END();
3781 }
3782 else
3783 {
3784 /*
3785 * Register, memory.
3786 */
3787 IEM_MC_BEGIN(3, 2);
3788 IEM_MC_ARG(uint64_t *, pDst, 0);
3789 IEM_MC_LOCAL(uint64_t, uSrc);
3790 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3792
3793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3794 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3795 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3797 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3798
3799 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3800 IEM_MC_PREPARE_FPU_USAGE();
3801 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3802 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3803
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 return VINF_SUCCESS;
3808}
3809
3810/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3811FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3812{
3813 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
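    /* Same 2-bit selector scheme as pshufw above, applied to the four dwords
       of the destination XMM register. */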
3814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3816 {
3817 /*
3818 * Register, register.
3819 */
3820 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822
3823 IEM_MC_BEGIN(3, 0);
3824 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3825 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3826 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3827 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3828 IEM_MC_PREPARE_SSE_USAGE();
3829 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3830 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3832 IEM_MC_ADVANCE_RIP();
3833 IEM_MC_END();
3834 }
3835 else
3836 {
3837 /*
3838 * Register, memory.
3839 */
3840 IEM_MC_BEGIN(3, 2);
3841 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3842 IEM_MC_LOCAL(RTUINT128U, uSrc);
3843 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3845
3846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3847 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3848 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3850 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3851
3852 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3853 IEM_MC_PREPARE_SSE_USAGE();
3854 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3855 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3856
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 return VINF_SUCCESS;
3861}
3862
3863/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3864FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3865{
3866 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3869 {
3870 /*
3871 * Register, register.
3872 */
3873 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3875
3876 IEM_MC_BEGIN(3, 0);
3877 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3878 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3879 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3880 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3881 IEM_MC_PREPARE_SSE_USAGE();
3882 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3883 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3884 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3885 IEM_MC_ADVANCE_RIP();
3886 IEM_MC_END();
3887 }
3888 else
3889 {
3890 /*
3891 * Register, memory.
3892 */
3893 IEM_MC_BEGIN(3, 2);
3894 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3895 IEM_MC_LOCAL(RTUINT128U, uSrc);
3896 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3898
3899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3900 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3901 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3904
3905 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3906 IEM_MC_PREPARE_SSE_USAGE();
3907 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3908 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3909
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 }
3913 return VINF_SUCCESS;
3914}
3915
3916/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3917FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3918{
3919 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3922 {
3923 /*
3924 * Register, register.
3925 */
3926 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928
3929 IEM_MC_BEGIN(3, 0);
3930 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3931 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3932 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3934 IEM_MC_PREPARE_SSE_USAGE();
3935 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3936 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3937 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3938 IEM_MC_ADVANCE_RIP();
3939 IEM_MC_END();
3940 }
3941 else
3942 {
3943 /*
3944 * Register, memory.
3945 */
3946 IEM_MC_BEGIN(3, 2);
3947 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3948 IEM_MC_LOCAL(RTUINT128U, uSrc);
3949 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3951
3952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3953 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3954 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3957
3958 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3959 IEM_MC_PREPARE_SSE_USAGE();
3960 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3961 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3962
3963 IEM_MC_ADVANCE_RIP();
3964 IEM_MC_END();
3965 }
3966 return VINF_SUCCESS;
3967}
3968
3969
3970/** Opcode 0x0f 0x71 11/2. */
3971FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3972
3973/** Opcode 0x66 0x0f 0x71 11/2. */
3974FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3975
3976/** Opcode 0x0f 0x71 11/4. */
3977FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3978
3979/** Opcode 0x66 0x0f 0x71 11/4. */
3980FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3981
3982/** Opcode 0x0f 0x71 11/6. */
3983FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3984
3985/** Opcode 0x66 0x0f 0x71 11/6. */
3986FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3987
3988
3989/**
3990 * Group 12 jump table for register variant.
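 *
 * Four entries per /reg value, selected by pVCpu->iem.s.idxPrefix in the
 * order: no prefix, 0x66, 0xf3, 0xf2. E.g. 0x66 0x0f 0x71 /2 dispatches to
 * iemOp_Grp12_psrlw_Ux_Ib.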
3991 */
3992IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3993{
3994 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3995 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3996 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3997 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3998 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3999 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4000 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4001 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4002};
4003AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4004
4005
4006/** Opcode 0x0f 0x71. */
4007FNIEMOP_DEF(iemOp_Grp12)
4008{
4009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4011 /* register, register */
4012 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4013 + pVCpu->iem.s.idxPrefix], bRm);
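    /* The memory forms of groups 12, 13 and 14 are all invalid; the 'NeedImm8'
       flavor of the invalid handler still consumes the immediate byte so the
       instruction length comes out right. */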
4014 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4015}
4016
4017
4018/** Opcode 0x0f 0x72 11/2. */
4019FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4020
4021/** Opcode 0x66 0x0f 0x72 11/2. */
4022FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4023
4024/** Opcode 0x0f 0x72 11/4. */
4025FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4026
4027/** Opcode 0x66 0x0f 0x72 11/4. */
4028FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4029
4030/** Opcode 0x0f 0x72 11/6. */
4031FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4032
4033/** Opcode 0x66 0x0f 0x72 11/6. */
4034FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4035
4036
4037/**
4038 * Group 13 jump table for register variant.
4039 */
4040IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4041{
4042 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4043 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4044 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4045 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4046 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4047 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4048 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4049 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4050};
4051AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4052
4053/** Opcode 0x0f 0x72. */
4054FNIEMOP_DEF(iemOp_Grp13)
4055{
4056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4058 /* register, register */
4059 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4060 + pVCpu->iem.s.idxPrefix], bRm);
4061 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4062}
4063
4064
4065/** Opcode 0x0f 0x73 11/2. */
4066FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4067
4068/** Opcode 0x66 0x0f 0x73 11/2. */
4069FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4070
4071/** Opcode 0x66 0x0f 0x73 11/3. */
4072FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4073
4074/** Opcode 0x0f 0x73 11/6. */
4075FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4076
4077/** Opcode 0x66 0x0f 0x73 11/6. */
4078FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4079
4080/** Opcode 0x66 0x0f 0x73 11/7. */
4081FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4082
4083/**
4084 * Group 14 jump table for register variant.
4085 */
4086IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4087{
4088 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4089 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4090 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4091 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4092 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4093 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4094 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4095 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4096};
4097AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4098
4099
4100/** Opcode 0x0f 0x73. */
4101FNIEMOP_DEF(iemOp_Grp14)
4102{
4103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4104 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4105 /* register, register */
4106 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4107 + pVCpu->iem.s.idxPrefix], bRm);
4108 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4109}
4110
4111
4112/**
4113 * Common worker for MMX instructions on the form:
4114 * pxxx mm1, mm2/mem64
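 *
 * The first operand is both source and destination; @a pImpl supplies the
 * pfnU64 worker that updates it in place.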
4115 */
4116FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4117{
4118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4120 {
4121 /*
4122 * Register, register.
4123 */
4124 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4125 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4127 IEM_MC_BEGIN(2, 0);
4128 IEM_MC_ARG(uint64_t *, pDst, 0);
4129 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4130 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4131 IEM_MC_PREPARE_FPU_USAGE();
4132 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4133 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4134 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4135 IEM_MC_ADVANCE_RIP();
4136 IEM_MC_END();
4137 }
4138 else
4139 {
4140 /*
4141 * Register, memory.
4142 */
4143 IEM_MC_BEGIN(2, 2);
4144 IEM_MC_ARG(uint64_t *, pDst, 0);
4145 IEM_MC_LOCAL(uint64_t, uSrc);
4146 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4148
4149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4151 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4152 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4153
4154 IEM_MC_PREPARE_FPU_USAGE();
4155 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4156 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4157
4158 IEM_MC_ADVANCE_RIP();
4159 IEM_MC_END();
4160 }
4161 return VINF_SUCCESS;
4162}
4163
4164
4165/**
4166 * Common worker for SSE2 instructions on the forms:
4167 * pxxx xmm1, xmm2/mem128
4168 *
4169 * Proper alignment of the 128-bit operand is enforced.
4170 * Exceptions type 4. SSE2 cpuid checks.
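 *
 * For illustration only (the real workers are implemented elsewhere and have
 * a different signature), a byte-compare worker of this kind boils down to:
 * @code
 *  static void pcmpeqbSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
 *  {
 *      for (unsigned i = 0; i < RT_ELEMENTS(puDst->au8); i++)
 *          puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 *  }
 * @endcode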
4171 */
4172FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4173{
4174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4175 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4176 {
4177 /*
4178 * Register, register.
4179 */
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_BEGIN(2, 0);
4182 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4183 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4184 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4185 IEM_MC_PREPARE_SSE_USAGE();
4186 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4187 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4188 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4189 IEM_MC_ADVANCE_RIP();
4190 IEM_MC_END();
4191 }
4192 else
4193 {
4194 /*
4195 * Register, memory.
4196 */
4197 IEM_MC_BEGIN(2, 2);
4198 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4199 IEM_MC_LOCAL(RTUINT128U, uSrc);
4200 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4202
4203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4206 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4207
4208 IEM_MC_PREPARE_SSE_USAGE();
4209 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4210 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4211
4212 IEM_MC_ADVANCE_RIP();
4213 IEM_MC_END();
4214 }
4215 return VINF_SUCCESS;
4216}
4217
4218
4219/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4220FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4221{
4222 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4223 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4224}
4225
4226/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4227FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4228{
4229    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4230 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4231}
4232
4233/* Opcode 0xf3 0x0f 0x74 - invalid */
4234/* Opcode 0xf2 0x0f 0x74 - invalid */
4235
4236
4237/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4238FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4239{
4240 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4241 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4242}
4243
4244/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4245FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4246{
4247 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4248 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4249}
4250
4251/* Opcode 0xf3 0x0f 0x75 - invalid */
4252/* Opcode 0xf2 0x0f 0x75 - invalid */
4253
4254
4255/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4256FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4257{
4258 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4259 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4260}
4261
4262/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4263FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4264{
4265    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4266 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4267}
4268
4269/* Opcode 0xf3 0x0f 0x76 - invalid */
4270/* Opcode 0xf2 0x0f 0x76 - invalid */
4271
4272
4273/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4274FNIEMOP_DEF(iemOp_emms)
4275{
4276 IEMOP_MNEMONIC(emms, "emms");
4277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4278
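    /* EMMS marks all eight x87/MMX registers as empty (tag word all ones) so
       that regular FPU code can follow MMX code. */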
4279 IEM_MC_BEGIN(0,0);
4280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4282 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4283 IEM_MC_FPU_FROM_MMX_MODE();
4284 IEM_MC_ADVANCE_RIP();
4285 IEM_MC_END();
4286 return VINF_SUCCESS;
4287}
4288
4289/* Opcode 0x66 0x0f 0x77 - invalid */
4290/* Opcode 0xf3 0x0f 0x77 - invalid */
4291/* Opcode 0xf2 0x0f 0x77 - invalid */
4292
4293/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4294#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4295FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4296{
4297 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4298 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVInstrDiag_Vmread);
4299 IEMOP_HLP_VMX_INSTR("vmread", kVmxVInstrDiag_Vmread);
4300 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4301
4302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4304 {
4305 /*
4306 * Register, register.
4307 */
4308 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4309 if (enmEffOpSize == IEMMODE_64BIT)
4310 {
4311 IEM_MC_BEGIN(2, 0);
4312 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4313 IEM_MC_ARG(uint64_t, u64Enc, 1);
4314 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4315 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4316 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread64_reg, pu64Dst, u64Enc);
4317 IEM_MC_END();
4318 }
4319 else
4320 {
4321 IEM_MC_BEGIN(2, 0);
4322 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4323 IEM_MC_ARG(uint32_t, u32Enc, 1);
4324 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4325 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4326 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread32_reg, pu32Dst, u32Enc);
4327 IEM_MC_END();
4328 }
4329 }
4330 else
4331 {
4332 /*
4333 * Register, memory.
4334 */
4335 if (enmEffOpSize == IEMMODE_64BIT)
4336 {
4337 IEM_MC_BEGIN(4, 0);
4338 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4339 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4340 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4341 IEM_MC_ARG(uint64_t, u64Enc, 3);
4342 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4344 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4345 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4346 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4347 IEM_MC_END();
4348 }
4349 else
4350 {
4351 IEM_MC_BEGIN(4, 0);
4352 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4353 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4354 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4355 IEM_MC_ARG(uint32_t, u32Enc, 3);
4356 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4358 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4359 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4360 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4361 IEM_MC_END();
4362 }
4363 }
4364 return VINF_SUCCESS;
4365}
4366#else
4367FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4368#endif
4369
4370/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4371FNIEMOP_STUB(iemOp_AmdGrp17);
4372/* Opcode 0xf3 0x0f 0x78 - invalid */
4373/* Opcode 0xf2 0x0f 0x78 - invalid */
4374
4375/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4376#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4377FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4378{
4379 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4380 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVInstrDiag_Vmwrite);
4381 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVInstrDiag_Vmwrite);
4382 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4383
4384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4385 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4386 {
4387 /*
4388 * Register, register.
4389 */
4390 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4391 if (enmEffOpSize == IEMMODE_64BIT)
4392 {
4393 IEM_MC_BEGIN(2, 0);
4394 IEM_MC_ARG(uint64_t, u64Val, 0);
4395 IEM_MC_ARG(uint64_t, u64Enc, 1);
4396 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4397 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4398 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4399 IEM_MC_END();
4400 }
4401 else
4402 {
4403 IEM_MC_BEGIN(2, 0);
4404 IEM_MC_ARG(uint32_t, u32Val, 0);
4405 IEM_MC_ARG(uint32_t, u32Enc, 1);
4406 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4407 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4408 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4409 IEM_MC_END();
4410 }
4411 }
4412 else
4413 {
4414 /*
4415 * Register, memory.
4416 */
4417 if (enmEffOpSize == IEMMODE_64BIT)
4418 {
4419 IEM_MC_BEGIN(4, 0);
4420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4421 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4422 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4423 IEM_MC_ARG(uint64_t, u64Enc, 3);
4424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4425 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4426 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4428 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4429 IEM_MC_END();
4430 }
4431 else
4432 {
4433 IEM_MC_BEGIN(4, 0);
4434 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4435 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4436 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4437 IEM_MC_ARG(uint32_t, u32Enc, 3);
4438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4439 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4440 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4441 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4442 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4443 IEM_MC_END();
4444 }
4445 }
4446 return VINF_SUCCESS;
4447}
4448#else
4449FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4450#endif
4451/* Opcode 0x66 0x0f 0x79 - invalid */
4452/* Opcode 0xf3 0x0f 0x79 - invalid */
4453/* Opcode 0xf2 0x0f 0x79 - invalid */
4454
4455/* Opcode 0x0f 0x7a - invalid */
4456/* Opcode 0x66 0x0f 0x7a - invalid */
4457/* Opcode 0xf3 0x0f 0x7a - invalid */
4458/* Opcode 0xf2 0x0f 0x7a - invalid */
4459
4460/* Opcode 0x0f 0x7b - invalid */
4461/* Opcode 0x66 0x0f 0x7b - invalid */
4462/* Opcode 0xf3 0x0f 0x7b - invalid */
4463/* Opcode 0xf2 0x0f 0x7b - invalid */
4464
4465/* Opcode 0x0f 0x7c - invalid */
4466/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4467FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4468/* Opcode 0xf3 0x0f 0x7c - invalid */
4469/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4470FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4471
4472/* Opcode 0x0f 0x7d - invalid */
4473/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4474FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4475/* Opcode 0xf3 0x0f 0x7d - invalid */
4476/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4477FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4478
4479
4480/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4481FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4482{
4483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4484 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4485 {
4486 /**
4487 * @opcode 0x7e
4488 * @opcodesub rex.w=1
4489 * @oppfx none
4490 * @opcpuid mmx
4491 * @opgroup og_mmx_datamove
4492 * @opxcpttype 5
4493 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4494 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4495 */
4496 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4498 {
4499 /* greg64, MMX */
4500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4501 IEM_MC_BEGIN(0, 1);
4502 IEM_MC_LOCAL(uint64_t, u64Tmp);
4503
4504 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4505 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4506
4507 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4508 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4509 IEM_MC_FPU_TO_MMX_MODE();
4510
4511 IEM_MC_ADVANCE_RIP();
4512 IEM_MC_END();
4513 }
4514 else
4515 {
4516 /* [mem64], MMX */
4517 IEM_MC_BEGIN(0, 2);
4518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4519 IEM_MC_LOCAL(uint64_t, u64Tmp);
4520
4521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4523 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4524 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4525
4526 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4527 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4528 IEM_MC_FPU_TO_MMX_MODE();
4529
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 }
4534 else
4535 {
4536 /**
4537 * @opdone
4538 * @opcode 0x7e
4539 * @opcodesub rex.w=0
4540 * @oppfx none
4541 * @opcpuid mmx
4542 * @opgroup og_mmx_datamove
4543 * @opxcpttype 5
4544     * @opfunction iemOp_movd_q_Ey_Pd
4545 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4546 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4547 */
4548 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4550 {
4551 /* greg32, MMX */
4552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4553 IEM_MC_BEGIN(0, 1);
4554 IEM_MC_LOCAL(uint32_t, u32Tmp);
4555
4556 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4557 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4558
4559 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4560 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4561 IEM_MC_FPU_TO_MMX_MODE();
4562
4563 IEM_MC_ADVANCE_RIP();
4564 IEM_MC_END();
4565 }
4566 else
4567 {
4568 /* [mem32], MMX */
4569 IEM_MC_BEGIN(0, 2);
4570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4571 IEM_MC_LOCAL(uint32_t, u32Tmp);
4572
4573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4576 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4577
4578 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4579 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4580 IEM_MC_FPU_TO_MMX_MODE();
4581
4582 IEM_MC_ADVANCE_RIP();
4583 IEM_MC_END();
4584 }
4585 }
4586 return VINF_SUCCESS;
4587
4588}
4589
4590
4591FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4592{
4593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4594 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4595 {
4596 /**
4597 * @opcode 0x7e
4598 * @opcodesub rex.w=1
4599 * @oppfx 0x66
4600 * @opcpuid sse2
4601 * @opgroup og_sse2_simdint_datamove
4602 * @opxcpttype 5
4603 * @optest 64-bit / op1=1 op2=2 -> op1=2
4604 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4605 */
4606 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4607 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4608 {
4609 /* greg64, XMM */
4610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4611 IEM_MC_BEGIN(0, 1);
4612 IEM_MC_LOCAL(uint64_t, u64Tmp);
4613
4614 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4615 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4616
4617 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4618 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4619
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 else
4624 {
4625 /* [mem64], XMM */
4626 IEM_MC_BEGIN(0, 2);
4627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4628 IEM_MC_LOCAL(uint64_t, u64Tmp);
4629
4630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4633 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4634
4635 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4636 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4637
4638 IEM_MC_ADVANCE_RIP();
4639 IEM_MC_END();
4640 }
4641 }
4642 else
4643 {
4644 /**
4645 * @opdone
4646 * @opcode 0x7e
4647 * @opcodesub rex.w=0
4648 * @oppfx 0x66
4649 * @opcpuid sse2
4650 * @opgroup og_sse2_simdint_datamove
4651 * @opxcpttype 5
4652     * @opfunction iemOp_movd_q_Ey_Vy
4653 * @optest op1=1 op2=2 -> op1=2
4654 * @optest op1=0 op2=-42 -> op1=-42
4655 */
4656 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4658 {
4659 /* greg32, XMM */
4660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4661 IEM_MC_BEGIN(0, 1);
4662 IEM_MC_LOCAL(uint32_t, u32Tmp);
4663
4664 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4665 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4666
4667 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4668 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4669
4670 IEM_MC_ADVANCE_RIP();
4671 IEM_MC_END();
4672 }
4673 else
4674 {
4675 /* [mem32], XMM */
4676 IEM_MC_BEGIN(0, 2);
4677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4678 IEM_MC_LOCAL(uint32_t, u32Tmp);
4679
4680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4684
4685 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4686 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4687
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 }
4691 }
4692 return VINF_SUCCESS;
4693
4694}
4695
4696/**
4697 * @opcode 0x7e
4698 * @oppfx 0xf3
4699 * @opcpuid sse2
4700 * @opgroup og_sse2_pcksclr_datamove
4701 * @opxcpttype none
4702 * @optest op1=1 op2=2 -> op1=2
4703 * @optest op1=0 op2=-42 -> op1=-42
4704 */
4705FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4706{
4707 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4710 {
4711 /*
4712 * Register, register.
4713 */
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715 IEM_MC_BEGIN(0, 2);
4716 IEM_MC_LOCAL(uint64_t, uSrc);
4717
4718 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4719 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4720
4721 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4722 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4723
4724 IEM_MC_ADVANCE_RIP();
4725 IEM_MC_END();
4726 }
4727 else
4728 {
4729 /*
4730         * Register, memory.
4731 */
4732 IEM_MC_BEGIN(0, 2);
4733 IEM_MC_LOCAL(uint64_t, uSrc);
4734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4735
4736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4738 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4739 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4740
4741 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4742 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4743
4744 IEM_MC_ADVANCE_RIP();
4745 IEM_MC_END();
4746 }
4747 return VINF_SUCCESS;
4748}
4749
4750/* Opcode 0xf2 0x0f 0x7e - invalid */
4751
4752
4753/** Opcode 0x0f 0x7f - movq Qq, Pq */
4754FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4755{
4756 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4759 {
4760 /*
4761 * Register, register.
4762 */
4763 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4764 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4766 IEM_MC_BEGIN(0, 1);
4767 IEM_MC_LOCAL(uint64_t, u64Tmp);
4768 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4769 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4770 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4771 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4772 IEM_MC_ADVANCE_RIP();
4773 IEM_MC_END();
4774 }
4775 else
4776 {
4777 /*
4778         * Memory, register.
4779 */
4780 IEM_MC_BEGIN(0, 2);
4781 IEM_MC_LOCAL(uint64_t, u64Tmp);
4782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4783
4784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4786 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4787 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4788
4789 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4790 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4791
4792 IEM_MC_ADVANCE_RIP();
4793 IEM_MC_END();
4794 }
4795 return VINF_SUCCESS;
4796}
4797
4798/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4799FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4800{
4801 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4804 {
4805 /*
4806 * Register, register.
4807 */
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809 IEM_MC_BEGIN(0, 0);
4810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4811 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4812 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4813 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4814 IEM_MC_ADVANCE_RIP();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 /*
4820         * Memory, register.
4821 */
4822 IEM_MC_BEGIN(0, 2);
4823 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4825
4826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4829 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4830
4831 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4832 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4833
4834 IEM_MC_ADVANCE_RIP();
4835 IEM_MC_END();
4836 }
4837 return VINF_SUCCESS;
4838}
4839
4840/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4841FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4842{
4843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4844 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /*
4848 * Register, register.
4849 */
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851 IEM_MC_BEGIN(0, 0);
4852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4853 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4854 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4855 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4856 IEM_MC_ADVANCE_RIP();
4857 IEM_MC_END();
4858 }
4859 else
4860 {
4861 /*
4862         * Memory, register.
4863 */
4864 IEM_MC_BEGIN(0, 2);
4865 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4867
4868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4871 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4872
4873 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4875
4876 IEM_MC_ADVANCE_RIP();
4877 IEM_MC_END();
4878 }
4879 return VINF_SUCCESS;
4880}
4881
4882/* Opcode 0xf2 0x0f 0x7f - invalid */
4883
4884
4885
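/*
 * Opcodes 0x0f 0x80 thru 0x0f 0x8f are the 386+ Jcc forms taking a 16- or
 * 32-bit displacement, mirroring the one-byte 0x70..0x7f short forms.
 */
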
4886/** Opcode 0x0f 0x80. */
4887FNIEMOP_DEF(iemOp_jo_Jv)
4888{
4889 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4890 IEMOP_HLP_MIN_386();
4891 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4892 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4893 {
4894 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4896
4897 IEM_MC_BEGIN(0, 0);
4898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4899 IEM_MC_REL_JMP_S16(i16Imm);
4900 } IEM_MC_ELSE() {
4901 IEM_MC_ADVANCE_RIP();
4902 } IEM_MC_ENDIF();
4903 IEM_MC_END();
4904 }
4905 else
4906 {
4907 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909
4910 IEM_MC_BEGIN(0, 0);
4911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4912 IEM_MC_REL_JMP_S32(i32Imm);
4913 } IEM_MC_ELSE() {
4914 IEM_MC_ADVANCE_RIP();
4915 } IEM_MC_ENDIF();
4916 IEM_MC_END();
4917 }
4918 return VINF_SUCCESS;
4919}
4920
4921
4922/** Opcode 0x0f 0x81. */
4923FNIEMOP_DEF(iemOp_jno_Jv)
4924{
4925 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4926 IEMOP_HLP_MIN_386();
4927 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4928 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4929 {
4930 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932
4933 IEM_MC_BEGIN(0, 0);
4934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4935 IEM_MC_ADVANCE_RIP();
4936 } IEM_MC_ELSE() {
4937 IEM_MC_REL_JMP_S16(i16Imm);
4938 } IEM_MC_ENDIF();
4939 IEM_MC_END();
4940 }
4941 else
4942 {
4943 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4945
4946 IEM_MC_BEGIN(0, 0);
4947 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4948 IEM_MC_ADVANCE_RIP();
4949 } IEM_MC_ELSE() {
4950 IEM_MC_REL_JMP_S32(i32Imm);
4951 } IEM_MC_ENDIF();
4952 IEM_MC_END();
4953 }
4954 return VINF_SUCCESS;
4955}
4956
4957
4958/** Opcode 0x0f 0x82. */
4959FNIEMOP_DEF(iemOp_jc_Jv)
4960{
4961 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4962 IEMOP_HLP_MIN_386();
4963 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4964 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4965 {
4966 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4968
4969 IEM_MC_BEGIN(0, 0);
4970 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4971 IEM_MC_REL_JMP_S16(i16Imm);
4972 } IEM_MC_ELSE() {
4973 IEM_MC_ADVANCE_RIP();
4974 } IEM_MC_ENDIF();
4975 IEM_MC_END();
4976 }
4977 else
4978 {
4979 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4981
4982 IEM_MC_BEGIN(0, 0);
4983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4984 IEM_MC_REL_JMP_S32(i32Imm);
4985 } IEM_MC_ELSE() {
4986 IEM_MC_ADVANCE_RIP();
4987 } IEM_MC_ENDIF();
4988 IEM_MC_END();
4989 }
4990 return VINF_SUCCESS;
4991}
4992
4993
4994/** Opcode 0x0f 0x83. */
4995FNIEMOP_DEF(iemOp_jnc_Jv)
4996{
4997 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4998 IEMOP_HLP_MIN_386();
4999 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5000 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5001 {
5002 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5004
5005 IEM_MC_BEGIN(0, 0);
5006 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5007 IEM_MC_ADVANCE_RIP();
5008 } IEM_MC_ELSE() {
5009 IEM_MC_REL_JMP_S16(i16Imm);
5010 } IEM_MC_ENDIF();
5011 IEM_MC_END();
5012 }
5013 else
5014 {
5015 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5017
5018 IEM_MC_BEGIN(0, 0);
5019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5020 IEM_MC_ADVANCE_RIP();
5021 } IEM_MC_ELSE() {
5022 IEM_MC_REL_JMP_S32(i32Imm);
5023 } IEM_MC_ENDIF();
5024 IEM_MC_END();
5025 }
5026 return VINF_SUCCESS;
5027}
5028
5029
5030/** Opcode 0x0f 0x84. */
5031FNIEMOP_DEF(iemOp_je_Jv)
5032{
5033 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5034 IEMOP_HLP_MIN_386();
5035 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5036 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5037 {
5038 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5040
5041 IEM_MC_BEGIN(0, 0);
5042 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5043 IEM_MC_REL_JMP_S16(i16Imm);
5044 } IEM_MC_ELSE() {
5045 IEM_MC_ADVANCE_RIP();
5046 } IEM_MC_ENDIF();
5047 IEM_MC_END();
5048 }
5049 else
5050 {
5051 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5053
5054 IEM_MC_BEGIN(0, 0);
5055 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5056 IEM_MC_REL_JMP_S32(i32Imm);
5057 } IEM_MC_ELSE() {
5058 IEM_MC_ADVANCE_RIP();
5059 } IEM_MC_ENDIF();
5060 IEM_MC_END();
5061 }
5062 return VINF_SUCCESS;
5063}
5064
5065
5066/** Opcode 0x0f 0x85. */
5067FNIEMOP_DEF(iemOp_jne_Jv)
5068{
5069 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5070 IEMOP_HLP_MIN_386();
5071 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5072 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5073 {
5074 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076
5077 IEM_MC_BEGIN(0, 0);
5078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5079 IEM_MC_ADVANCE_RIP();
5080 } IEM_MC_ELSE() {
5081 IEM_MC_REL_JMP_S16(i16Imm);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_END();
5084 }
5085 else
5086 {
5087 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5089
5090 IEM_MC_BEGIN(0, 0);
5091 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5092 IEM_MC_ADVANCE_RIP();
5093 } IEM_MC_ELSE() {
5094 IEM_MC_REL_JMP_S32(i32Imm);
5095 } IEM_MC_ENDIF();
5096 IEM_MC_END();
5097 }
5098 return VINF_SUCCESS;
5099}
5100
5101
5102/** Opcode 0x0f 0x86. */
5103FNIEMOP_DEF(iemOp_jbe_Jv)
5104{
5105 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5106 IEMOP_HLP_MIN_386();
5107 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5108 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5109 {
5110 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5112
5113 IEM_MC_BEGIN(0, 0);
5114 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5115 IEM_MC_REL_JMP_S16(i16Imm);
5116 } IEM_MC_ELSE() {
5117 IEM_MC_ADVANCE_RIP();
5118 } IEM_MC_ENDIF();
5119 IEM_MC_END();
5120 }
5121 else
5122 {
5123 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5125
5126 IEM_MC_BEGIN(0, 0);
5127 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5128 IEM_MC_REL_JMP_S32(i32Imm);
5129 } IEM_MC_ELSE() {
5130 IEM_MC_ADVANCE_RIP();
5131 } IEM_MC_ENDIF();
5132 IEM_MC_END();
5133 }
5134 return VINF_SUCCESS;
5135}
5136
5137
5138/** Opcode 0x0f 0x87. */
5139FNIEMOP_DEF(iemOp_jnbe_Jv)
5140{
5141 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5142 IEMOP_HLP_MIN_386();
5143 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5144 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5145 {
5146 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5148
5149 IEM_MC_BEGIN(0, 0);
5150 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5151 IEM_MC_ADVANCE_RIP();
5152 } IEM_MC_ELSE() {
5153 IEM_MC_REL_JMP_S16(i16Imm);
5154 } IEM_MC_ENDIF();
5155 IEM_MC_END();
5156 }
5157 else
5158 {
5159 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161
5162 IEM_MC_BEGIN(0, 0);
5163 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5164 IEM_MC_ADVANCE_RIP();
5165 } IEM_MC_ELSE() {
5166 IEM_MC_REL_JMP_S32(i32Imm);
5167 } IEM_MC_ENDIF();
5168 IEM_MC_END();
5169 }
5170 return VINF_SUCCESS;
5171}
5172
5173
5174/** Opcode 0x0f 0x88. */
5175FNIEMOP_DEF(iemOp_js_Jv)
5176{
5177 IEMOP_MNEMONIC(js_Jv, "js Jv");
5178 IEMOP_HLP_MIN_386();
5179 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5180 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5181 {
5182 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5184
5185 IEM_MC_BEGIN(0, 0);
5186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5187 IEM_MC_REL_JMP_S16(i16Imm);
5188 } IEM_MC_ELSE() {
5189 IEM_MC_ADVANCE_RIP();
5190 } IEM_MC_ENDIF();
5191 IEM_MC_END();
5192 }
5193 else
5194 {
5195 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5197
5198 IEM_MC_BEGIN(0, 0);
5199 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5200 IEM_MC_REL_JMP_S32(i32Imm);
5201 } IEM_MC_ELSE() {
5202 IEM_MC_ADVANCE_RIP();
5203 } IEM_MC_ENDIF();
5204 IEM_MC_END();
5205 }
5206 return VINF_SUCCESS;
5207}
5208
5209
5210/** Opcode 0x0f 0x89. */
5211FNIEMOP_DEF(iemOp_jns_Jv)
5212{
5213 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5214 IEMOP_HLP_MIN_386();
5215 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5216 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5217 {
5218 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5220
5221 IEM_MC_BEGIN(0, 0);
5222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5223 IEM_MC_ADVANCE_RIP();
5224 } IEM_MC_ELSE() {
5225 IEM_MC_REL_JMP_S16(i16Imm);
5226 } IEM_MC_ENDIF();
5227 IEM_MC_END();
5228 }
5229 else
5230 {
5231 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5233
5234 IEM_MC_BEGIN(0, 0);
5235 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5236 IEM_MC_ADVANCE_RIP();
5237 } IEM_MC_ELSE() {
5238 IEM_MC_REL_JMP_S32(i32Imm);
5239 } IEM_MC_ENDIF();
5240 IEM_MC_END();
5241 }
5242 return VINF_SUCCESS;
5243}
5244
5245
5246/** Opcode 0x0f 0x8a. */
5247FNIEMOP_DEF(iemOp_jp_Jv)
5248{
5249 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5250 IEMOP_HLP_MIN_386();
5251 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5252 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5253 {
5254 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5256
5257 IEM_MC_BEGIN(0, 0);
5258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5259 IEM_MC_REL_JMP_S16(i16Imm);
5260 } IEM_MC_ELSE() {
5261 IEM_MC_ADVANCE_RIP();
5262 } IEM_MC_ENDIF();
5263 IEM_MC_END();
5264 }
5265 else
5266 {
5267 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5269
5270 IEM_MC_BEGIN(0, 0);
5271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5272 IEM_MC_REL_JMP_S32(i32Imm);
5273 } IEM_MC_ELSE() {
5274 IEM_MC_ADVANCE_RIP();
5275 } IEM_MC_ENDIF();
5276 IEM_MC_END();
5277 }
5278 return VINF_SUCCESS;
5279}
5280
5281
5282/** Opcode 0x0f 0x8b. */
5283FNIEMOP_DEF(iemOp_jnp_Jv)
5284{
5285 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5286 IEMOP_HLP_MIN_386();
5287 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5288 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5289 {
5290 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5292
5293 IEM_MC_BEGIN(0, 0);
5294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5295 IEM_MC_ADVANCE_RIP();
5296 } IEM_MC_ELSE() {
5297 IEM_MC_REL_JMP_S16(i16Imm);
5298 } IEM_MC_ENDIF();
5299 IEM_MC_END();
5300 }
5301 else
5302 {
5303 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5305
5306 IEM_MC_BEGIN(0, 0);
5307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5308 IEM_MC_ADVANCE_RIP();
5309 } IEM_MC_ELSE() {
5310 IEM_MC_REL_JMP_S32(i32Imm);
5311 } IEM_MC_ENDIF();
5312 IEM_MC_END();
5313 }
5314 return VINF_SUCCESS;
5315}
5316
5317
5318/** Opcode 0x0f 0x8c. */
5319FNIEMOP_DEF(iemOp_jl_Jv)
5320{
5321 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
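    /* Signed 'less': the branch is taken when SF != OF. */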
5322 IEMOP_HLP_MIN_386();
5323 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5324 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5325 {
5326 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5328
5329 IEM_MC_BEGIN(0, 0);
5330 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5331 IEM_MC_REL_JMP_S16(i16Imm);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_ADVANCE_RIP();
5334 } IEM_MC_ENDIF();
5335 IEM_MC_END();
5336 }
5337 else
5338 {
5339 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5341
5342 IEM_MC_BEGIN(0, 0);
5343 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5344 IEM_MC_REL_JMP_S32(i32Imm);
5345 } IEM_MC_ELSE() {
5346 IEM_MC_ADVANCE_RIP();
5347 } IEM_MC_ENDIF();
5348 IEM_MC_END();
5349 }
5350 return VINF_SUCCESS;
5351}
5352
5353
5354/** Opcode 0x0f 0x8d. */
5355FNIEMOP_DEF(iemOp_jnl_Jv)
5356{
5357 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5358 IEMOP_HLP_MIN_386();
5359 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5360 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5361 {
5362 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5364
5365 IEM_MC_BEGIN(0, 0);
5366 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5367 IEM_MC_ADVANCE_RIP();
5368 } IEM_MC_ELSE() {
5369 IEM_MC_REL_JMP_S16(i16Imm);
5370 } IEM_MC_ENDIF();
5371 IEM_MC_END();
5372 }
5373 else
5374 {
5375 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5377
5378 IEM_MC_BEGIN(0, 0);
5379 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5380 IEM_MC_ADVANCE_RIP();
5381 } IEM_MC_ELSE() {
5382 IEM_MC_REL_JMP_S32(i32Imm);
5383 } IEM_MC_ENDIF();
5384 IEM_MC_END();
5385 }
5386 return VINF_SUCCESS;
5387}
5388
5389
5390/** Opcode 0x0f 0x8e. */
5391FNIEMOP_DEF(iemOp_jle_Jv)
5392{
5393 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
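    /* Signed 'less or equal': taken when ZF is set or SF != OF. */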
5394 IEMOP_HLP_MIN_386();
5395 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5396 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5397 {
5398 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5400
5401 IEM_MC_BEGIN(0, 0);
5402 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5403 IEM_MC_REL_JMP_S16(i16Imm);
5404 } IEM_MC_ELSE() {
5405 IEM_MC_ADVANCE_RIP();
5406 } IEM_MC_ENDIF();
5407 IEM_MC_END();
5408 }
5409 else
5410 {
5411 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413
5414 IEM_MC_BEGIN(0, 0);
5415 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5416 IEM_MC_REL_JMP_S32(i32Imm);
5417 } IEM_MC_ELSE() {
5418 IEM_MC_ADVANCE_RIP();
5419 } IEM_MC_ENDIF();
5420 IEM_MC_END();
5421 }
5422 return VINF_SUCCESS;
5423}
5424
5425
5426/** Opcode 0x0f 0x8f. */
5427FNIEMOP_DEF(iemOp_jnle_Jv)
5428{
5429 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5430 IEMOP_HLP_MIN_386();
5431 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5432 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5433 {
5434 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5436
5437 IEM_MC_BEGIN(0, 0);
5438 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5439 IEM_MC_ADVANCE_RIP();
5440 } IEM_MC_ELSE() {
5441 IEM_MC_REL_JMP_S16(i16Imm);
5442 } IEM_MC_ENDIF();
5443 IEM_MC_END();
5444 }
5445 else
5446 {
5447 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5449
5450 IEM_MC_BEGIN(0, 0);
5451 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5452 IEM_MC_ADVANCE_RIP();
5453 } IEM_MC_ELSE() {
5454 IEM_MC_REL_JMP_S32(i32Imm);
5455 } IEM_MC_ENDIF();
5456 IEM_MC_END();
5457 }
5458 return VINF_SUCCESS;
5459}
5460
5461
5462/** Opcode 0x0f 0x90. */
5463FNIEMOP_DEF(iemOp_seto_Eb)
5464{
5465 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5466 IEMOP_HLP_MIN_386();
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468
5469 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5470 * any way. AMD says it's "unused", whatever that means. We're
5471     *        ignoring it for now. */
5472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5473 {
5474 /* register target */
5475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5476 IEM_MC_BEGIN(0, 0);
5477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5478 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5479 } IEM_MC_ELSE() {
5480 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5481 } IEM_MC_ENDIF();
5482 IEM_MC_ADVANCE_RIP();
5483 IEM_MC_END();
5484 }
5485 else
5486 {
5487 /* memory target */
5488 IEM_MC_BEGIN(0, 1);
5489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5492 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5493 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5494 } IEM_MC_ELSE() {
5495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5496 } IEM_MC_ENDIF();
5497 IEM_MC_ADVANCE_RIP();
5498 IEM_MC_END();
5499 }
5500 return VINF_SUCCESS;
5501}
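/*
 * The SETcc Eb family (0x0f 0x90 thru 0x9f) follows the pattern above:
 * evaluate a condition from EFLAGS and store a constant 1 or 0 byte into the
 * ModR/M register or memory operand without modifying any flags; a condition
 * and its negation (seto above vs. setno below) differ only in the polarity
 * of the stored constants.  Roughly equivalent C for the register form of
 * seto, using an illustrative greg8() shorthand that is not an IEM API:
 *      *greg8(bRm & X86_MODRM_RM_MASK) = (fEFlags & X86_EFL_OF) ? 1 : 0;
 */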
5502
5503
5504/** Opcode 0x0f 0x91. */
5505FNIEMOP_DEF(iemOp_setno_Eb)
5506{
5507 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5508 IEMOP_HLP_MIN_386();
5509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5510
5511 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5512 * any way. AMD says it's "unused", whatever that means. We're
5513 * ignoring it for now. */
5514 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5515 {
5516 /* register target */
5517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5518 IEM_MC_BEGIN(0, 0);
5519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5520 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5521 } IEM_MC_ELSE() {
5522 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5523 } IEM_MC_ENDIF();
5524 IEM_MC_ADVANCE_RIP();
5525 IEM_MC_END();
5526 }
5527 else
5528 {
5529 /* memory target */
5530 IEM_MC_BEGIN(0, 1);
5531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5534 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5535 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5536 } IEM_MC_ELSE() {
5537 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5538 } IEM_MC_ENDIF();
5539 IEM_MC_ADVANCE_RIP();
5540 IEM_MC_END();
5541 }
5542 return VINF_SUCCESS;
5543}
5544
5545
5546/** Opcode 0x0f 0x92. */
5547FNIEMOP_DEF(iemOp_setc_Eb)
5548{
5549 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5550 IEMOP_HLP_MIN_386();
5551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5552
5553 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5554 * any way. AMD says it's "unused", whatever that means. We're
5555 * ignoring it for now. */
5556 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5557 {
5558 /* register target */
5559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5560 IEM_MC_BEGIN(0, 0);
5561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5562 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP();
5567 IEM_MC_END();
5568 }
5569 else
5570 {
5571 /* memory target */
5572 IEM_MC_BEGIN(0, 1);
5573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5576 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5577 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5578 } IEM_MC_ELSE() {
5579 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5580 } IEM_MC_ENDIF();
5581 IEM_MC_ADVANCE_RIP();
5582 IEM_MC_END();
5583 }
5584 return VINF_SUCCESS;
5585}
5586
5587
5588/** Opcode 0x0f 0x93. */
5589FNIEMOP_DEF(iemOp_setnc_Eb)
5590{
5591 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5592 IEMOP_HLP_MIN_386();
5593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5594
5595 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5596 * any way. AMD says it's "unused", whatever that means. We're
5597 * ignoring it for now. */
5598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5599 {
5600 /* register target */
5601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5602 IEM_MC_BEGIN(0, 0);
5603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5604 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5605 } IEM_MC_ELSE() {
5606 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5607 } IEM_MC_ENDIF();
5608 IEM_MC_ADVANCE_RIP();
5609 IEM_MC_END();
5610 }
5611 else
5612 {
5613 /* memory target */
5614 IEM_MC_BEGIN(0, 1);
5615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5618 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5619 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5620 } IEM_MC_ELSE() {
5621 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5622 } IEM_MC_ENDIF();
5623 IEM_MC_ADVANCE_RIP();
5624 IEM_MC_END();
5625 }
5626 return VINF_SUCCESS;
5627}
5628
5629
5630/** Opcode 0x0f 0x94. */
5631FNIEMOP_DEF(iemOp_sete_Eb)
5632{
5633 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5634 IEMOP_HLP_MIN_386();
5635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5636
5637 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5638 * any way. AMD says it's "unused", whatever that means. We're
5639 * ignoring it for now. */
5640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5641 {
5642 /* register target */
5643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5644 IEM_MC_BEGIN(0, 0);
5645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5646 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5647 } IEM_MC_ELSE() {
5648 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5649 } IEM_MC_ENDIF();
5650 IEM_MC_ADVANCE_RIP();
5651 IEM_MC_END();
5652 }
5653 else
5654 {
5655 /* memory target */
5656 IEM_MC_BEGIN(0, 1);
5657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5661 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5662 } IEM_MC_ELSE() {
5663 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5664 } IEM_MC_ENDIF();
5665 IEM_MC_ADVANCE_RIP();
5666 IEM_MC_END();
5667 }
5668 return VINF_SUCCESS;
5669}
5670
5671
5672/** Opcode 0x0f 0x95. */
5673FNIEMOP_DEF(iemOp_setne_Eb)
5674{
5675 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5676 IEMOP_HLP_MIN_386();
5677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5678
5679 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5680 * any way. AMD says it's "unused", whatever that means. We're
5681 * ignoring it for now. */
5682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5683 {
5684 /* register target */
5685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5686 IEM_MC_BEGIN(0, 0);
5687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5688 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5689 } IEM_MC_ELSE() {
5690 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5691 } IEM_MC_ENDIF();
5692 IEM_MC_ADVANCE_RIP();
5693 IEM_MC_END();
5694 }
5695 else
5696 {
5697 /* memory target */
5698 IEM_MC_BEGIN(0, 1);
5699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5703 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5704 } IEM_MC_ELSE() {
5705 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5706 } IEM_MC_ENDIF();
5707 IEM_MC_ADVANCE_RIP();
5708 IEM_MC_END();
5709 }
5710 return VINF_SUCCESS;
5711}
5712
5713
5714/** Opcode 0x0f 0x96. */
5715FNIEMOP_DEF(iemOp_setbe_Eb)
5716{
5717 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5718 IEMOP_HLP_MIN_386();
5719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5720
5721 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5722 * any way. AMD says it's "unused", whatever that means. We're
5723 * ignoring it for now. */
5724 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5725 {
5726 /* register target */
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_BEGIN(0, 0);
5729 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5730 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5731 } IEM_MC_ELSE() {
5732 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5733 } IEM_MC_ENDIF();
5734 IEM_MC_ADVANCE_RIP();
5735 IEM_MC_END();
5736 }
5737 else
5738 {
5739 /* memory target */
5740 IEM_MC_BEGIN(0, 1);
5741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5744 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5745 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5746 } IEM_MC_ELSE() {
5747 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5748 } IEM_MC_ENDIF();
5749 IEM_MC_ADVANCE_RIP();
5750 IEM_MC_END();
5751 }
5752 return VINF_SUCCESS;
5753}
5754
5755
5756/** Opcode 0x0f 0x97. */
5757FNIEMOP_DEF(iemOp_setnbe_Eb)
5758{
5759 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5760 IEMOP_HLP_MIN_386();
5761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5762
5763 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5764 * any way. AMD says it's "unused", whatever that means. We're
5765 * ignoring it for now. */
5766 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5767 {
5768 /* register target */
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 IEM_MC_BEGIN(0, 0);
5771 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5772 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5773 } IEM_MC_ELSE() {
5774 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5775 } IEM_MC_ENDIF();
5776 IEM_MC_ADVANCE_RIP();
5777 IEM_MC_END();
5778 }
5779 else
5780 {
5781 /* memory target */
5782 IEM_MC_BEGIN(0, 1);
5783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5786 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5787 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5788 } IEM_MC_ELSE() {
5789 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5790 } IEM_MC_ENDIF();
5791 IEM_MC_ADVANCE_RIP();
5792 IEM_MC_END();
5793 }
5794 return VINF_SUCCESS;
5795}
5796
5797
5798/** Opcode 0x0f 0x98. */
5799FNIEMOP_DEF(iemOp_sets_Eb)
5800{
5801 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5802 IEMOP_HLP_MIN_386();
5803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5804
5805 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5806 * any way. AMD says it's "unused", whatever that means. We're
5807 * ignoring it for now. */
5808 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5809 {
5810 /* register target */
5811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5812 IEM_MC_BEGIN(0, 0);
5813 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5814 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5815 } IEM_MC_ELSE() {
5816 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5817 } IEM_MC_ENDIF();
5818 IEM_MC_ADVANCE_RIP();
5819 IEM_MC_END();
5820 }
5821 else
5822 {
5823 /* memory target */
5824 IEM_MC_BEGIN(0, 1);
5825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5828 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5829 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5830 } IEM_MC_ELSE() {
5831 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5832 } IEM_MC_ENDIF();
5833 IEM_MC_ADVANCE_RIP();
5834 IEM_MC_END();
5835 }
5836 return VINF_SUCCESS;
5837}
5838
5839
5840/** Opcode 0x0f 0x99. */
5841FNIEMOP_DEF(iemOp_setns_Eb)
5842{
5843 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5844 IEMOP_HLP_MIN_386();
5845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5846
5847 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5848 * any way. AMD says it's "unused", whatever that means. We're
5849 * ignoring it for now. */
5850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5851 {
5852 /* register target */
5853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5854 IEM_MC_BEGIN(0, 0);
5855 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5856 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5857 } IEM_MC_ELSE() {
5858 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5859 } IEM_MC_ENDIF();
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 }
5863 else
5864 {
5865 /* memory target */
5866 IEM_MC_BEGIN(0, 1);
5867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5870 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5871 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5872 } IEM_MC_ELSE() {
5873 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5874 } IEM_MC_ENDIF();
5875 IEM_MC_ADVANCE_RIP();
5876 IEM_MC_END();
5877 }
5878 return VINF_SUCCESS;
5879}
5880
5881
5882/** Opcode 0x0f 0x9a. */
5883FNIEMOP_DEF(iemOp_setp_Eb)
5884{
5885 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5886 IEMOP_HLP_MIN_386();
5887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5888
5889 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5890 * any way. AMD says it's "unused", whatever that means. We're
5891 * ignoring it for now. */
5892 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5893 {
5894 /* register target */
5895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5896 IEM_MC_BEGIN(0, 0);
5897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5898 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5899 } IEM_MC_ELSE() {
5900 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5901 } IEM_MC_ENDIF();
5902 IEM_MC_ADVANCE_RIP();
5903 IEM_MC_END();
5904 }
5905 else
5906 {
5907 /* memory target */
5908 IEM_MC_BEGIN(0, 1);
5909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5914 } IEM_MC_ELSE() {
5915 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5916 } IEM_MC_ENDIF();
5917 IEM_MC_ADVANCE_RIP();
5918 IEM_MC_END();
5919 }
5920 return VINF_SUCCESS;
5921}
5922
5923
5924/** Opcode 0x0f 0x9b. */
5925FNIEMOP_DEF(iemOp_setnp_Eb)
5926{
5927 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5928 IEMOP_HLP_MIN_386();
5929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5930
5931 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5932 * any way. AMD says it's "unused", whatever that means. We're
5933 * ignoring it for now. */
5934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5935 {
5936 /* register target */
5937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5938 IEM_MC_BEGIN(0, 0);
5939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5940 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5941 } IEM_MC_ELSE() {
5942 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5943 } IEM_MC_ENDIF();
5944 IEM_MC_ADVANCE_RIP();
5945 IEM_MC_END();
5946 }
5947 else
5948 {
5949 /* memory target */
5950 IEM_MC_BEGIN(0, 1);
5951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5954 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5955 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5956 } IEM_MC_ELSE() {
5957 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5958 } IEM_MC_ENDIF();
5959 IEM_MC_ADVANCE_RIP();
5960 IEM_MC_END();
5961 }
5962 return VINF_SUCCESS;
5963}
5964
5965
5966/** Opcode 0x0f 0x9c. */
5967FNIEMOP_DEF(iemOp_setl_Eb)
5968{
5969 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5970 IEMOP_HLP_MIN_386();
5971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5972
5973 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5974 * any way. AMD says it's "unused", whatever that means. We're
5975 * ignoring it for now. */
5976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5977 {
5978 /* register target */
5979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5980 IEM_MC_BEGIN(0, 0);
5981 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5982 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5983 } IEM_MC_ELSE() {
5984 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5985 } IEM_MC_ENDIF();
5986 IEM_MC_ADVANCE_RIP();
5987 IEM_MC_END();
5988 }
5989 else
5990 {
5991 /* memory target */
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5996 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5997 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5998 } IEM_MC_ELSE() {
5999 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6000 } IEM_MC_ENDIF();
6001 IEM_MC_ADVANCE_RIP();
6002 IEM_MC_END();
6003 }
6004 return VINF_SUCCESS;
6005}
6006
6007
6008/** Opcode 0x0f 0x9d. */
6009FNIEMOP_DEF(iemOp_setnl_Eb)
6010{
6011 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6012 IEMOP_HLP_MIN_386();
6013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6014
6015 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6016 * any way. AMD says it's "unused", whatever that means. We're
6017 * ignoring it for now. */
6018 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6019 {
6020 /* register target */
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_BEGIN(0, 0);
6023 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6024 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6025 } IEM_MC_ELSE() {
6026 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6027 } IEM_MC_ENDIF();
6028 IEM_MC_ADVANCE_RIP();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* memory target */
6034 IEM_MC_BEGIN(0, 1);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6039 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6040 } IEM_MC_ELSE() {
6041 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6042 } IEM_MC_ENDIF();
6043 IEM_MC_ADVANCE_RIP();
6044 IEM_MC_END();
6045 }
6046 return VINF_SUCCESS;
6047}
6048
6049
6050/** Opcode 0x0f 0x9e. */
6051FNIEMOP_DEF(iemOp_setle_Eb)
6052{
6053 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6054 IEMOP_HLP_MIN_386();
6055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6056
6057 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6058 * any way. AMD says it's "unused", whatever that means. We're
6059 * ignoring it for now. */
6060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6061 {
6062 /* register target */
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6064 IEM_MC_BEGIN(0, 0);
6065 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6066 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6067 } IEM_MC_ELSE() {
6068 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6069 } IEM_MC_ENDIF();
6070 IEM_MC_ADVANCE_RIP();
6071 IEM_MC_END();
6072 }
6073 else
6074 {
6075 /* memory target */
6076 IEM_MC_BEGIN(0, 1);
6077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6081 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6082 } IEM_MC_ELSE() {
6083 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6084 } IEM_MC_ENDIF();
6085 IEM_MC_ADVANCE_RIP();
6086 IEM_MC_END();
6087 }
6088 return VINF_SUCCESS;
6089}
6090
6091
6092/** Opcode 0x0f 0x9f. */
6093FNIEMOP_DEF(iemOp_setnle_Eb)
6094{
6095 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6096 IEMOP_HLP_MIN_386();
6097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6098
6099 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6100 * any way. AMD says it's "unused", whatever that means. We're
6101 * ignoring it for now. */
6102 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6103 {
6104 /* register target */
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 IEM_MC_BEGIN(0, 0);
6107 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6109 } IEM_MC_ELSE() {
6110 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6111 } IEM_MC_ENDIF();
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 }
6115 else
6116 {
6117 /* memory target */
6118 IEM_MC_BEGIN(0, 1);
6119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6122 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6124 } IEM_MC_ELSE() {
6125 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6126 } IEM_MC_ENDIF();
6127 IEM_MC_ADVANCE_RIP();
6128 IEM_MC_END();
6129 }
6130 return VINF_SUCCESS;
6131}
6132
6133
6134/**
6135 * Common 'push segment-register' helper.
6136 */
6137FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6138{
6139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6140 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only fs/gs pushes reach this in 64-bit mode; es/cs/ss/ds are #UD there. */
6141 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6142
6143 switch (pVCpu->iem.s.enmEffOpSize)
6144 {
6145 case IEMMODE_16BIT:
6146 IEM_MC_BEGIN(0, 1);
6147 IEM_MC_LOCAL(uint16_t, u16Value);
6148 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6149 IEM_MC_PUSH_U16(u16Value);
6150 IEM_MC_ADVANCE_RIP();
6151 IEM_MC_END();
6152 break;
6153
6154 case IEMMODE_32BIT:
6155 IEM_MC_BEGIN(0, 1);
6156 IEM_MC_LOCAL(uint32_t, u32Value);
6157 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6158 IEM_MC_PUSH_U32_SREG(u32Value);
6159 IEM_MC_ADVANCE_RIP();
6160 IEM_MC_END();
6161 break;
6162
6163 case IEMMODE_64BIT:
6164 IEM_MC_BEGIN(0, 1);
6165 IEM_MC_LOCAL(uint64_t, u64Value);
6166 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6167 IEM_MC_PUSH_U64(u64Value);
6168 IEM_MC_ADVANCE_RIP();
6169 IEM_MC_END();
6170 break;
6171 }
6172
6173 return VINF_SUCCESS;
6174}
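/*
 * Note on IEM_MC_PUSH_U32_SREG above: a 32-bit push of a segment register is
 * special-cased because real CPUs may write only the low 16 bits of the
 * 32-bit stack slot and leave the upper half untouched, which is presumably
 * why a dedicated microcode op exists instead of the plain IEM_MC_PUSH_U32
 * used elsewhere.  The 16-bit and 64-bit cases have no such quirk.
 */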
6175
6176
6177/** Opcode 0x0f 0xa0. */
6178FNIEMOP_DEF(iemOp_push_fs)
6179{
6180 IEMOP_MNEMONIC(push_fs, "push fs");
6181 IEMOP_HLP_MIN_386();
6182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6183 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6184}
6185
6186
6187/** Opcode 0x0f 0xa1. */
6188FNIEMOP_DEF(iemOp_pop_fs)
6189{
6190 IEMOP_MNEMONIC(pop_fs, "pop fs");
6191 IEMOP_HLP_MIN_386();
6192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6193 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6194}
6195
6196
6197/** Opcode 0x0f 0xa2. */
6198FNIEMOP_DEF(iemOp_cpuid)
6199{
6200 IEMOP_MNEMONIC(cpuid, "cpuid");
6201 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
6202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6203 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6204}
6205
6206
6207/**
6208 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6209 * iemOp_bts_Ev_Gv.
6210 */
6211FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6212{
6213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6214 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6215
6216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6217 {
6218 /* register destination. */
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220 switch (pVCpu->iem.s.enmEffOpSize)
6221 {
6222 case IEMMODE_16BIT:
6223 IEM_MC_BEGIN(3, 0);
6224 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6225 IEM_MC_ARG(uint16_t, u16Src, 1);
6226 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6227
6228 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6229 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6230 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6231 IEM_MC_REF_EFLAGS(pEFlags);
6232 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6233
6234 IEM_MC_ADVANCE_RIP();
6235 IEM_MC_END();
6236 return VINF_SUCCESS;
6237
6238 case IEMMODE_32BIT:
6239 IEM_MC_BEGIN(3, 0);
6240 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6241 IEM_MC_ARG(uint32_t, u32Src, 1);
6242 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6243
6244 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6245 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6246 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6247 IEM_MC_REF_EFLAGS(pEFlags);
6248 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6249
6250 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6251 IEM_MC_ADVANCE_RIP();
6252 IEM_MC_END();
6253 return VINF_SUCCESS;
6254
6255 case IEMMODE_64BIT:
6256 IEM_MC_BEGIN(3, 0);
6257 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6258 IEM_MC_ARG(uint64_t, u64Src, 1);
6259 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6260
6261 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6262 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6263 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6264 IEM_MC_REF_EFLAGS(pEFlags);
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6266
6267 IEM_MC_ADVANCE_RIP();
6268 IEM_MC_END();
6269 return VINF_SUCCESS;
6270
6271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6272 }
6273 }
6274 else
6275 {
6276 /* memory destination. */
6277
6278 uint32_t fAccess;
6279 if (pImpl->pfnLockedU16)
6280 fAccess = IEM_ACCESS_DATA_RW;
6281 else /* BT */
6282 fAccess = IEM_ACCESS_DATA_R;
6283
6284 /** @todo test negative bit offsets! */
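 /* How the memory form works (illustrative): the Gv bit offset is a signed
    bit index relative to the memory operand.  It is split below into an
    operand-sized word index (arithmetic shift right by 4/5/6) that is scaled
    to bytes (shift left by 1/2/3) and added to the effective address, while
    the low 4/5/6 bits select the bit within that word.  For the 16-bit form,
    a bit offset of 35 thus accesses bit 35 & 15 = 3 of the word at
    GCPtrEffDst + (35 >> 4) * 2 = GCPtrEffDst + 4, and an offset of -1
    accesses bit 15 of the word at GCPtrEffDst - 2.  BT itself only reads,
    so it has no locked worker (pfnLockedU16 is NULL), fAccess is read-only,
    and IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() is used for it below to
    reject a LOCK prefix. */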
6285 switch (pVCpu->iem.s.enmEffOpSize)
6286 {
6287 case IEMMODE_16BIT:
6288 IEM_MC_BEGIN(3, 2);
6289 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6290 IEM_MC_ARG(uint16_t, u16Src, 1);
6291 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6293 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6294
6295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6296 if (pImpl->pfnLockedU16)
6297 IEMOP_HLP_DONE_DECODING();
6298 else
6299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6300 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6301 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6302 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6303 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6304 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6305 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6306 IEM_MC_FETCH_EFLAGS(EFlags);
6307
6308 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6309 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6311 else
6312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6313 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6314
6315 IEM_MC_COMMIT_EFLAGS(EFlags);
6316 IEM_MC_ADVANCE_RIP();
6317 IEM_MC_END();
6318 return VINF_SUCCESS;
6319
6320 case IEMMODE_32BIT:
6321 IEM_MC_BEGIN(3, 2);
6322 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6323 IEM_MC_ARG(uint32_t, u32Src, 1);
6324 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6326 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6327
6328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6329 if (pImpl->pfnLockedU16)
6330 IEMOP_HLP_DONE_DECODING();
6331 else
6332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6333 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6334 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6335 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6336 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6337 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6338 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6339 IEM_MC_FETCH_EFLAGS(EFlags);
6340
6341 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6342 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6344 else
6345 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6346 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6347
6348 IEM_MC_COMMIT_EFLAGS(EFlags);
6349 IEM_MC_ADVANCE_RIP();
6350 IEM_MC_END();
6351 return VINF_SUCCESS;
6352
6353 case IEMMODE_64BIT:
6354 IEM_MC_BEGIN(3, 2);
6355 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6356 IEM_MC_ARG(uint64_t, u64Src, 1);
6357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6359 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6360
6361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6362 if (pImpl->pfnLockedU16)
6363 IEMOP_HLP_DONE_DECODING();
6364 else
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6367 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6368 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6369 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6370 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6371 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6372 IEM_MC_FETCH_EFLAGS(EFlags);
6373
6374 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6375 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6376 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6377 else
6378 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6380
6381 IEM_MC_COMMIT_EFLAGS(EFlags);
6382 IEM_MC_ADVANCE_RIP();
6383 IEM_MC_END();
6384 return VINF_SUCCESS;
6385
6386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6387 }
6388 }
6389}
6390
6391
6392/** Opcode 0x0f 0xa3. */
6393FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6394{
6395 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6396 IEMOP_HLP_MIN_386();
6397 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6398}
6399
6400
6401/**
6402 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6403 */
6404FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6405{
6406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6407 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6408
6409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6410 {
6411 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413
6414 switch (pVCpu->iem.s.enmEffOpSize)
6415 {
6416 case IEMMODE_16BIT:
6417 IEM_MC_BEGIN(4, 0);
6418 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6419 IEM_MC_ARG(uint16_t, u16Src, 1);
6420 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6421 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6422
6423 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6424 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6425 IEM_MC_REF_EFLAGS(pEFlags);
6426 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6427
6428 IEM_MC_ADVANCE_RIP();
6429 IEM_MC_END();
6430 return VINF_SUCCESS;
6431
6432 case IEMMODE_32BIT:
6433 IEM_MC_BEGIN(4, 0);
6434 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6435 IEM_MC_ARG(uint32_t, u32Src, 1);
6436 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6437 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6438
6439 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6440 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6441 IEM_MC_REF_EFLAGS(pEFlags);
6442 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6443
6444 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6445 IEM_MC_ADVANCE_RIP();
6446 IEM_MC_END();
6447 return VINF_SUCCESS;
6448
6449 case IEMMODE_64BIT:
6450 IEM_MC_BEGIN(4, 0);
6451 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6452 IEM_MC_ARG(uint64_t, u64Src, 1);
6453 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6454 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6455
6456 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6457 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6458 IEM_MC_REF_EFLAGS(pEFlags);
6459 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6460
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6466 }
6467 }
6468 else
6469 {
6470 switch (pVCpu->iem.s.enmEffOpSize)
6471 {
6472 case IEMMODE_16BIT:
6473 IEM_MC_BEGIN(4, 2);
6474 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6475 IEM_MC_ARG(uint16_t, u16Src, 1);
6476 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6477 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6479
6480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
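 /* Note: the trailing '1' tells the effective address calculation that one
    immediate byte still follows the ModR/M encoding, which matters for
    RIP-relative addressing in 64-bit mode; that is why the imm8 is fetched
    only afterwards.  The 32-bit and 64-bit cases below do the same. */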
6481 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6482 IEM_MC_ASSIGN(cShiftArg, cShift);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6485 IEM_MC_FETCH_EFLAGS(EFlags);
6486 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6487 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6488
6489 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6490 IEM_MC_COMMIT_EFLAGS(EFlags);
6491 IEM_MC_ADVANCE_RIP();
6492 IEM_MC_END();
6493 return VINF_SUCCESS;
6494
6495 case IEMMODE_32BIT:
6496 IEM_MC_BEGIN(4, 2);
6497 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6498 IEM_MC_ARG(uint32_t, u32Src, 1);
6499 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6500 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6502
6503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6504 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6505 IEM_MC_ASSIGN(cShiftArg, cShift);
6506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6507 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6508 IEM_MC_FETCH_EFLAGS(EFlags);
6509 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6510 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6511
6512 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6513 IEM_MC_COMMIT_EFLAGS(EFlags);
6514 IEM_MC_ADVANCE_RIP();
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517
6518 case IEMMODE_64BIT:
6519 IEM_MC_BEGIN(4, 2);
6520 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6521 IEM_MC_ARG(uint64_t, u64Src, 1);
6522 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6523 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6525
6526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6527 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6528 IEM_MC_ASSIGN(cShiftArg, cShift);
6529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6530 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6531 IEM_MC_FETCH_EFLAGS(EFlags);
6532 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6533 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6534
6535 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6536 IEM_MC_COMMIT_EFLAGS(EFlags);
6537 IEM_MC_ADVANCE_RIP();
6538 IEM_MC_END();
6539 return VINF_SUCCESS;
6540
6541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6542 }
6543 }
6544}
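/*
 * SHLD/SHRD in brief (a sketch, not the authoritative definition): the
 * destination is shifted by the count while the bits shifted in come from
 * the Gv source, giving a double-precision shift; 'shld eax, ebx, 8' is
 * roughly
 *      eax = (eax << 8) | (ebx >> 24);
 * AF and OF are declared undefined to the verifier above because their
 * hardware behaviour is not architecturally specified for these instructions.
 */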
6545
6546
6547/**
6548 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6549 */
6550FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6551{
6552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6553 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6554
6555 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6556 {
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558
6559 switch (pVCpu->iem.s.enmEffOpSize)
6560 {
6561 case IEMMODE_16BIT:
6562 IEM_MC_BEGIN(4, 0);
6563 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6564 IEM_MC_ARG(uint16_t, u16Src, 1);
6565 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6566 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6567
6568 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6569 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6570 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6571 IEM_MC_REF_EFLAGS(pEFlags);
6572 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6573
6574 IEM_MC_ADVANCE_RIP();
6575 IEM_MC_END();
6576 return VINF_SUCCESS;
6577
6578 case IEMMODE_32BIT:
6579 IEM_MC_BEGIN(4, 0);
6580 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6581 IEM_MC_ARG(uint32_t, u32Src, 1);
6582 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6583 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6584
6585 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6586 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6587 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6588 IEM_MC_REF_EFLAGS(pEFlags);
6589 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6590
6591 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6592 IEM_MC_ADVANCE_RIP();
6593 IEM_MC_END();
6594 return VINF_SUCCESS;
6595
6596 case IEMMODE_64BIT:
6597 IEM_MC_BEGIN(4, 0);
6598 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6599 IEM_MC_ARG(uint64_t, u64Src, 1);
6600 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6601 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6602
6603 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6604 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6605 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6606 IEM_MC_REF_EFLAGS(pEFlags);
6607 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6608
6609 IEM_MC_ADVANCE_RIP();
6610 IEM_MC_END();
6611 return VINF_SUCCESS;
6612
6613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6614 }
6615 }
6616 else
6617 {
6618 switch (pVCpu->iem.s.enmEffOpSize)
6619 {
6620 case IEMMODE_16BIT:
6621 IEM_MC_BEGIN(4, 2);
6622 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6623 IEM_MC_ARG(uint16_t, u16Src, 1);
6624 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6625 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6627
6628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6630 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6631 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6632 IEM_MC_FETCH_EFLAGS(EFlags);
6633 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6634 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6635
6636 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6637 IEM_MC_COMMIT_EFLAGS(EFlags);
6638 IEM_MC_ADVANCE_RIP();
6639 IEM_MC_END();
6640 return VINF_SUCCESS;
6641
6642 case IEMMODE_32BIT:
6643 IEM_MC_BEGIN(4, 2);
6644 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6645 IEM_MC_ARG(uint32_t, u32Src, 1);
6646 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6647 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6649
6650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6653 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6654 IEM_MC_FETCH_EFLAGS(EFlags);
6655 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6656 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6657
6658 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6659 IEM_MC_COMMIT_EFLAGS(EFlags);
6660 IEM_MC_ADVANCE_RIP();
6661 IEM_MC_END();
6662 return VINF_SUCCESS;
6663
6664 case IEMMODE_64BIT:
6665 IEM_MC_BEGIN(4, 2);
6666 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6667 IEM_MC_ARG(uint64_t, u64Src, 1);
6668 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6669 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6671
6672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6674 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6675 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6676 IEM_MC_FETCH_EFLAGS(EFlags);
6677 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6678 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6679
6680 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6681 IEM_MC_COMMIT_EFLAGS(EFlags);
6682 IEM_MC_ADVANCE_RIP();
6683 IEM_MC_END();
6684 return VINF_SUCCESS;
6685
6686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6687 }
6688 }
6689}
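/*
 * The only difference from the immediate variant above is that the shift
 * count is fetched at runtime from CL (the low byte of xCX) rather than from
 * an imm8; masking the count to the operand width is presumably left to the
 * pfnNormalU* workers.
 */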
6690
6691
6692
6693/** Opcode 0x0f 0xa4. */
6694FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6695{
6696 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6697 IEMOP_HLP_MIN_386();
6698 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6699}
6700
6701
6702/** Opcode 0x0f 0xa5. */
6703FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6704{
6705 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6706 IEMOP_HLP_MIN_386();
6707 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6708}
6709
6710
6711/** Opcode 0x0f 0xa8. */
6712FNIEMOP_DEF(iemOp_push_gs)
6713{
6714 IEMOP_MNEMONIC(push_gs, "push gs");
6715 IEMOP_HLP_MIN_386();
6716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6717 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6718}
6719
6720
6721/** Opcode 0x0f 0xa9. */
6722FNIEMOP_DEF(iemOp_pop_gs)
6723{
6724 IEMOP_MNEMONIC(pop_gs, "pop gs");
6725 IEMOP_HLP_MIN_386();
6726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6727 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6728}
6729
6730
6731/** Opcode 0x0f 0xaa. */
6732FNIEMOP_DEF(iemOp_rsm)
6733{
6734 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6735 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6738}
6739
6740
6741
6742/** Opcode 0x0f 0xab. */
6743FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6744{
6745 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6746 IEMOP_HLP_MIN_386();
6747 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6748}
6749
6750
6751/** Opcode 0x0f 0xac. */
6752FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6753{
6754 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6755 IEMOP_HLP_MIN_386();
6756 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6757}
6758
6759
6760/** Opcode 0x0f 0xad. */
6761FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6762{
6763 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6764 IEMOP_HLP_MIN_386();
6765 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6766}
6767
6768
6769/** Opcode 0x0f 0xae mem/0. */
6770FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6771{
6772 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6773 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6774 return IEMOP_RAISE_INVALID_OPCODE();
6775
6776 IEM_MC_BEGIN(3, 1);
6777 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6778 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6779 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6782 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6783 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6784 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6785 IEM_MC_END();
6786 return VINF_SUCCESS;
6787}
6788
6789
6790/** Opcode 0x0f 0xae mem/1. */
6791FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6792{
6793 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6794 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6795 return IEMOP_RAISE_INVALID_OPCODE();
6796
6797 IEM_MC_BEGIN(3, 1);
6798 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6799 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6800 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6803 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6804 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6805 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6806 IEM_MC_END();
6807 return VINF_SUCCESS;
6808}
6809
6810
6811/**
6812 * @opmaps grp15
6813 * @opcode !11/2
6814 * @oppfx none
6815 * @opcpuid sse
6816 * @opgroup og_sse_mxcsrsm
6817 * @opxcpttype 5
6818 * @optest op1=0 -> mxcsr=0
6819 * @optest op1=0x2083 -> mxcsr=0x2083
6820 * @optest op1=0xfffffffe -> value.xcpt=0xd
6821 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6822 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6823 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6824 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6825 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6826 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6827 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6828 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6829 */
6830FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6831{
6832 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6833 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6834 return IEMOP_RAISE_INVALID_OPCODE();
6835
6836 IEM_MC_BEGIN(2, 0);
6837 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6838 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6841 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR, so actualize for modification. */
6842 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6843 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6844 IEM_MC_END();
6845 return VINF_SUCCESS;
6846}
6847
6848
6849/**
6850 * @opmaps grp15
6851 * @opcode !11/3
6852 * @oppfx none
6853 * @opcpuid sse
6854 * @opgroup og_sse_mxcsrsm
6855 * @opxcpttype 5
6856 * @optest mxcsr=0 -> op1=0
6857 * @optest mxcsr=0x2083 -> op1=0x2083
6858 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6859 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6860 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6861 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6862 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6863 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6864 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6865 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6866 */
6867FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6868{
6869 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6870 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6871 return IEMOP_RAISE_INVALID_OPCODE();
6872
6873 IEM_MC_BEGIN(2, 0);
6874 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6875 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6878 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6879 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6880 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6881 IEM_MC_END();
6882 return VINF_SUCCESS;
6883}
6884
6885
6886/**
6887 * @opmaps grp15
6888 * @opcode !11/4
6889 * @oppfx none
6890 * @opcpuid xsave
6891 * @opgroup og_system
6892 * @opxcpttype none
6893 */
6894FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6895{
6896 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6897 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6898 return IEMOP_RAISE_INVALID_OPCODE();
6899
6900 IEM_MC_BEGIN(3, 0);
6901 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6902 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6903 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6906 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6907 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6908 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6909 IEM_MC_END();
6910 return VINF_SUCCESS;
6911}
6912
6913
6914/**
6915 * @opmaps grp15
6916 * @opcode !11/5
6917 * @oppfx none
6918 * @opcpuid xsave
6919 * @opgroup og_system
6920 * @opxcpttype none
6921 */
6922FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6923{
6924 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6925 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6926 return IEMOP_RAISE_INVALID_OPCODE();
6927
6928 IEM_MC_BEGIN(3, 0);
6929 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6930 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6931 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6934 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads the state, so actualize for modification like fxrstor above. */
6935 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6936 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6937 IEM_MC_END();
6938 return VINF_SUCCESS;
6939}
6940
6941/** Opcode 0x0f 0xae mem/6. */
6942FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6943
6944/**
6945 * @opmaps grp15
6946 * @opcode !11/7
6947 * @oppfx none
6948 * @opcpuid clfsh
6949 * @opgroup og_cachectl
6950 * @optest op1=1 ->
6951 */
6952FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6953{
6954 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6955 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6956 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6957
6958 IEM_MC_BEGIN(2, 0);
6959 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6960 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6963 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6964 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6965 IEM_MC_END();
6966 return VINF_SUCCESS;
6967}
6968
6969/**
6970 * @opmaps grp15
6971 * @opcode !11/7
6972 * @oppfx 0x66
6973 * @opcpuid clflushopt
6974 * @opgroup og_cachectl
6975 * @optest op1=1 ->
6976 */
6977FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6978{
6979 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6981 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6982
6983 IEM_MC_BEGIN(2, 0);
6984 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6985 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6989 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6990 IEM_MC_END();
6991 return VINF_SUCCESS;
6992}
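/*
 * clflush and clflushopt share iemCImpl_clflush_clflushopt: both flush the
 * cache line containing the byte operand, and the only encoding difference
 * visible at this level is the 66h prefix column in the group 15 memory
 * jump table further down.
 */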
6993
6994
6995/** Opcode 0x0f 0xae 11b/5. */
6996FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6997{
6998 RT_NOREF_PV(bRm);
6999 IEMOP_MNEMONIC(lfence, "lfence");
7000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7001 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7002 return IEMOP_RAISE_INVALID_OPCODE();
7003
7004 IEM_MC_BEGIN(0, 0);
7005 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7006 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7007 else
7008 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7009 IEM_MC_ADVANCE_RIP();
7010 IEM_MC_END();
7011 return VINF_SUCCESS;
7012}
7013
7014
7015/** Opcode 0x0f 0xae 11b/6. */
7016FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7017{
7018 RT_NOREF_PV(bRm);
7019 IEMOP_MNEMONIC(mfence, "mfence");
7020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7021 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7022 return IEMOP_RAISE_INVALID_OPCODE();
7023
7024 IEM_MC_BEGIN(0, 0);
7025 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7026 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7027 else
7028 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7029 IEM_MC_ADVANCE_RIP();
7030 IEM_MC_END();
7031 return VINF_SUCCESS;
7032}
7033
7034
7035/** Opcode 0x0f 0xae 11b/7. */
7036FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7037{
7038 RT_NOREF_PV(bRm);
7039 IEMOP_MNEMONIC(sfence, "sfence");
7040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7041 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7042 return IEMOP_RAISE_INVALID_OPCODE();
7043
7044 IEM_MC_BEGIN(0, 0);
7045 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7046 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7047 else
7048 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7049 IEM_MC_ADVANCE_RIP();
7050 IEM_MC_END();
7051 return VINF_SUCCESS;
7052}
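/*
 * For all three fences above: when the host CPU itself lacks SSE2, the
 * emulation falls back to iemAImpl_alt_mem_fence, presumably achieving the
 * required serialization by other means (e.g. a locked memory operation),
 * since the native lfence/mfence/sfence instructions would be unavailable.
 */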
7053
7054
7055/** Opcode 0xf3 0x0f 0xae 11b/0. */
7056FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7057{
7058 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7061 {
7062 IEM_MC_BEGIN(1, 0);
7063 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7064 IEM_MC_ARG(uint64_t, u64Dst, 0);
7065 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7066 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7067 IEM_MC_ADVANCE_RIP();
7068 IEM_MC_END();
7069 }
7070 else
7071 {
7072 IEM_MC_BEGIN(1, 0);
7073 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7074 IEM_MC_ARG(uint32_t, u32Dst, 0);
7075 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7076 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7077 IEM_MC_ADVANCE_RIP();
7078 IEM_MC_END();
7079 }
7080 return VINF_SUCCESS;
7081}
7082
7083
7084/** Opcode 0xf3 0x0f 0xae 11b/1. */
7085FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7086{
7087 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7089 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7090 {
7091 IEM_MC_BEGIN(1, 0);
7092 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7093 IEM_MC_ARG(uint64_t, u64Dst, 0);
7094 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7095 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7096 IEM_MC_ADVANCE_RIP();
7097 IEM_MC_END();
7098 }
7099 else
7100 {
7101 IEM_MC_BEGIN(1, 0);
7102 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7103 IEM_MC_ARG(uint32_t, u32Dst, 0);
7104 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7105 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7106 IEM_MC_ADVANCE_RIP();
7107 IEM_MC_END();
7108 }
7109 return VINF_SUCCESS;
7110}
7111
7112
7113/** Opcode 0xf3 0x0f 0xae 11b/2. */
7114FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7115{
7116 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7119 {
7120 IEM_MC_BEGIN(1, 0);
7121 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7122 IEM_MC_ARG(uint64_t, u64Dst, 0);
7123 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7124 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7125 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7126 IEM_MC_ADVANCE_RIP();
7127 IEM_MC_END();
7128 }
7129 else
7130 {
7131 IEM_MC_BEGIN(1, 0);
7132 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7133 IEM_MC_ARG(uint32_t, u32Dst, 0);
7134 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7135 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7136 IEM_MC_ADVANCE_RIP();
7137 IEM_MC_END();
7138 }
7139 return VINF_SUCCESS;
7140}
7141
7142
7143/** Opcode 0xf3 0x0f 0xae 11b/3. */
7144FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7145{
7146 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7149 {
7150 IEM_MC_BEGIN(1, 0);
7151 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7152 IEM_MC_ARG(uint64_t, u64Dst, 0);
7153 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7154 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7155 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7156 IEM_MC_ADVANCE_RIP();
7157 IEM_MC_END();
7158 }
7159 else
7160 {
7161 IEM_MC_BEGIN(1, 0);
7162 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7163 IEM_MC_ARG(uint32_t, u32Dst, 0);
7164 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7165 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7166 IEM_MC_ADVANCE_RIP();
7167 IEM_MC_END();
7168 }
7169 return VINF_SUCCESS;
7170}
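/*
 * Common to the four FS/GS base accessors above:
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT() covers the CPUID and CR4.FSGSBASE
 * checks, the 64-bit write forms must verify that the new base is canonical
 * (IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0), and the 32-bit write forms
 * zero-extend the value into the 64-bit base, so no canonical check is
 * needed there.
 */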
7171
7172
7173/**
7174 * Group 15 jump table for register variant.
7175 */
7176IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7177{ /* pfx: none, 066h, 0f3h, 0f2h */
7178 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7179 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7180 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7181 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7182 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7183 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7184 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7185 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7186};
7187AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7188
7189
7190/**
7191 * Group 15 jump table for memory variant.
7192 */
7193IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7194{ /* pfx: none, 066h, 0f3h, 0f2h */
7195 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7196 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7197 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7198 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7199 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7200 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7201 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7202 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7203};
7204AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7205
7206
7207/** Opcode 0x0f 0xae. */
7208FNIEMOP_DEF(iemOp_Grp15)
7209{
7210 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7213 /* register, register */
7214 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7215 + pVCpu->iem.s.idxPrefix], bRm);
7216 /* memory, register */
7217 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7218 + pVCpu->iem.s.idxPrefix], bRm);
7219}
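
/* A stand-alone sketch of how the Group 15 dispatch index above is formed
 * (the helper name is made up and not part of IEM; the 4 is the number of
 * prefix columns: none, 066h, 0f3h, 0f2h):
 * @code
 * #include <stdint.h>
 * static unsigned sketchGrp15Index(uint8_t bRm, unsigned idxPrefix)
 * {
 *     unsigned const iReg = (bRm >> 3) & 7; // ModR/M reg field, /0../7
 *     return iReg * 4 + idxPrefix;          // row-major: 8 rows x 4 prefix columns
 * }
 * // Example: bRm=0xE8 (mod=11b, reg=/5, rm=0) with no prefix gives index 20,
 * // which selects iemOp_Grp15_lfence in g_apfnGroup15RegReg.
 * @endcode
 */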
7220
7221
7222/** Opcode 0x0f 0xaf. */
7223FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7224{
7225 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7226 IEMOP_HLP_MIN_386();
7227 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7228 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7229}
7230
7231
7232/** Opcode 0x0f 0xb0. */
7233FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7234{
7235 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7236 IEMOP_HLP_MIN_486();
7237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7238
7239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7240 {
7241 IEMOP_HLP_DONE_DECODING();
7242 IEM_MC_BEGIN(4, 0);
7243 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7244 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7245 IEM_MC_ARG(uint8_t, u8Src, 2);
7246 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7247
7248 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7249 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7250 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7251 IEM_MC_REF_EFLAGS(pEFlags);
7252 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7253 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7254 else
7255 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7256
7257 IEM_MC_ADVANCE_RIP();
7258 IEM_MC_END();
7259 }
7260 else
7261 {
7262 IEM_MC_BEGIN(4, 3);
7263 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7264 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7265 IEM_MC_ARG(uint8_t, u8Src, 2);
7266 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7268 IEM_MC_LOCAL(uint8_t, u8Al);
7269
7270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7271 IEMOP_HLP_DONE_DECODING();
7272 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7273 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7274 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7275 IEM_MC_FETCH_EFLAGS(EFlags);
7276 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7277 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7278 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7279 else
7280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7281
7282 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7283 IEM_MC_COMMIT_EFLAGS(EFlags);
7284 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7285 IEM_MC_ADVANCE_RIP();
7286 IEM_MC_END();
7287 }
7288 return VINF_SUCCESS;
7289}
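
/* What the iemAImpl_cmpxchg_u8 worker computes, as a hedged stand-alone
 * sketch (the function name below is made up; flag updates other than ZF
 * are omitted):
 * @code
 * #include <stdint.h>
 * static void sketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, int *pfZf)
 * {
 *     if (*puDst == *puAl) { *pfZf = 1; *puDst = uSrc;   } // equal: store source
 *     else                 { *pfZf = 0; *puAl  = *puDst; } // not equal: load destination
 * }
 * @endcode
 * This is why the memory form above can store u8Al back to AL unconditionally:
 * the worker only changes the local copy on a mismatch, so the write-back is a
 * no-op when the exchange succeeded.
 */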
7290
7291/** Opcode 0x0f 0xb1. */
7292FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7293{
7294 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7295 IEMOP_HLP_MIN_486();
7296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7297
7298 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7299 {
7300 IEMOP_HLP_DONE_DECODING();
7301 switch (pVCpu->iem.s.enmEffOpSize)
7302 {
7303 case IEMMODE_16BIT:
7304 IEM_MC_BEGIN(4, 0);
7305 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7306 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7307 IEM_MC_ARG(uint16_t, u16Src, 2);
7308 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7309
7310 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7311 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7312 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7313 IEM_MC_REF_EFLAGS(pEFlags);
7314 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7315 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7316 else
7317 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7318
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 return VINF_SUCCESS;
7322
7323 case IEMMODE_32BIT:
7324 IEM_MC_BEGIN(4, 0);
7325 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7326 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7327 IEM_MC_ARG(uint32_t, u32Src, 2);
7328 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7329
7330 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7331 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7332 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7333 IEM_MC_REF_EFLAGS(pEFlags);
7334 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7335 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7336 else
7337 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7338
7339 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7340 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7341 IEM_MC_ADVANCE_RIP();
7342 IEM_MC_END();
7343 return VINF_SUCCESS;
7344
7345 case IEMMODE_64BIT:
7346 IEM_MC_BEGIN(4, 0);
7347 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7348 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7349#ifdef RT_ARCH_X86
7350 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7351#else
7352 IEM_MC_ARG(uint64_t, u64Src, 2);
7353#endif
7354 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7355
7356 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7357 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7358 IEM_MC_REF_EFLAGS(pEFlags);
7359#ifdef RT_ARCH_X86
7360 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7361 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7362 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7363 else
7364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7365#else
7366 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7367 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7368 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7369 else
7370 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7371#endif
7372
7373 IEM_MC_ADVANCE_RIP();
7374 IEM_MC_END();
7375 return VINF_SUCCESS;
7376
7377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7378 }
7379 }
7380 else
7381 {
7382 switch (pVCpu->iem.s.enmEffOpSize)
7383 {
7384 case IEMMODE_16BIT:
7385 IEM_MC_BEGIN(4, 3);
7386 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7387 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7388 IEM_MC_ARG(uint16_t, u16Src, 2);
7389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7391 IEM_MC_LOCAL(uint16_t, u16Ax);
7392
7393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7394 IEMOP_HLP_DONE_DECODING();
7395 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7396 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7397 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7398 IEM_MC_FETCH_EFLAGS(EFlags);
7399 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7400 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7401 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7402 else
7403 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7404
7405 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7406 IEM_MC_COMMIT_EFLAGS(EFlags);
7407 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7408 IEM_MC_ADVANCE_RIP();
7409 IEM_MC_END();
7410 return VINF_SUCCESS;
7411
7412 case IEMMODE_32BIT:
7413 IEM_MC_BEGIN(4, 3);
7414 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7415 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7416 IEM_MC_ARG(uint32_t, u32Src, 2);
7417 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7419 IEM_MC_LOCAL(uint32_t, u32Eax);
7420
7421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7422 IEMOP_HLP_DONE_DECODING();
7423 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7424 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7425 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7426 IEM_MC_FETCH_EFLAGS(EFlags);
7427 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7428 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7429 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7430 else
7431 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7432
7433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7434 IEM_MC_COMMIT_EFLAGS(EFlags);
7435 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7436 IEM_MC_ADVANCE_RIP();
7437 IEM_MC_END();
7438 return VINF_SUCCESS;
7439
7440 case IEMMODE_64BIT:
7441 IEM_MC_BEGIN(4, 3);
7442 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7443 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7444#ifdef RT_ARCH_X86
7445 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7446#else
7447 IEM_MC_ARG(uint64_t, u64Src, 2);
7448#endif
7449 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7451 IEM_MC_LOCAL(uint64_t, u64Rax);
7452
7453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7454 IEMOP_HLP_DONE_DECODING();
7455 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7456 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7457 IEM_MC_FETCH_EFLAGS(EFlags);
7458 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7459#ifdef RT_ARCH_X86
7460 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7461 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7463 else
7464 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7465#else
7466 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7467 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7469 else
7470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7471#endif
7472
7473 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7474 IEM_MC_COMMIT_EFLAGS(EFlags);
7475 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7476 IEM_MC_ADVANCE_RIP();
7477 IEM_MC_END();
7478 return VINF_SUCCESS;
7479
7480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7481 }
7482 }
7483}
7484
7485
7486FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7487{
7488 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7489 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7490
7491 switch (pVCpu->iem.s.enmEffOpSize)
7492 {
7493 case IEMMODE_16BIT:
7494 IEM_MC_BEGIN(5, 1);
7495 IEM_MC_ARG(uint16_t, uSel, 0);
7496 IEM_MC_ARG(uint16_t, offSeg, 1);
7497 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7498 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7499 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7500 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7503 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7504 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7505 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7506 IEM_MC_END();
7507 return VINF_SUCCESS;
7508
7509 case IEMMODE_32BIT:
7510 IEM_MC_BEGIN(5, 1);
7511 IEM_MC_ARG(uint16_t, uSel, 0);
7512 IEM_MC_ARG(uint32_t, offSeg, 1);
7513 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7514 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7515 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7516 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7519 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7520 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7521 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7522 IEM_MC_END();
7523 return VINF_SUCCESS;
7524
7525 case IEMMODE_64BIT:
7526 IEM_MC_BEGIN(5, 1);
7527 IEM_MC_ARG(uint16_t, uSel, 0);
7528 IEM_MC_ARG(uint64_t, offSeg, 1);
7529 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7530 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7531 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7532 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7535 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
7536 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7537 else
7538 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7539 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7540 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7541 IEM_MC_END();
7542 return VINF_SUCCESS;
7543
7544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7545 }
7546}
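
/* The Mp operand layout the fetches above consume, sketched for the 32-bit
 * case (the struct is illustrative only, not an IEM type; the members have
 * no padding between them as laid out here):
 * @code
 * #include <stdint.h>
 * typedef struct SKETCHFARPTR32
 * {
 *     uint32_t offSeg; // +0: offset, read by IEM_MC_FETCH_MEM_U32
 *     uint16_t uSel;   // +4: selector, read by IEM_MC_FETCH_MEM_U16_DISP(..., 4)
 * } SKETCHFARPTR32;
 * @endcode
 * The 16-bit and 64-bit cases follow the same offset-then-selector pattern,
 * with the selector at displacement 2 and 8 respectively.
 */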
7547
7548
7549/** Opcode 0x0f 0xb2. */
7550FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7551{
7552 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7553 IEMOP_HLP_MIN_386();
7554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7555 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7556 return IEMOP_RAISE_INVALID_OPCODE();
7557 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7558}
7559
7560
7561/** Opcode 0x0f 0xb3. */
7562FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7563{
7564 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7565 IEMOP_HLP_MIN_386();
7566 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7567}
7568
7569
7570/** Opcode 0x0f 0xb4. */
7571FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7572{
7573 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7574 IEMOP_HLP_MIN_386();
7575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7576 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7577 return IEMOP_RAISE_INVALID_OPCODE();
7578 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7579}
7580
7581
7582/** Opcode 0x0f 0xb5. */
7583FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7584{
7585 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7586 IEMOP_HLP_MIN_386();
7587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7589 return IEMOP_RAISE_INVALID_OPCODE();
7590 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7591}
7592
7593
7594/** Opcode 0x0f 0xb6. */
7595FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7596{
7597 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7598 IEMOP_HLP_MIN_386();
7599
7600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7601
7602 /*
7603 * If rm is denoting a register, no more instruction bytes.
7604 */
7605 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7606 {
7607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7608 switch (pVCpu->iem.s.enmEffOpSize)
7609 {
7610 case IEMMODE_16BIT:
7611 IEM_MC_BEGIN(0, 1);
7612 IEM_MC_LOCAL(uint16_t, u16Value);
7613 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7614 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7615 IEM_MC_ADVANCE_RIP();
7616 IEM_MC_END();
7617 return VINF_SUCCESS;
7618
7619 case IEMMODE_32BIT:
7620 IEM_MC_BEGIN(0, 1);
7621 IEM_MC_LOCAL(uint32_t, u32Value);
7622 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7623 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7624 IEM_MC_ADVANCE_RIP();
7625 IEM_MC_END();
7626 return VINF_SUCCESS;
7627
7628 case IEMMODE_64BIT:
7629 IEM_MC_BEGIN(0, 1);
7630 IEM_MC_LOCAL(uint64_t, u64Value);
7631 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7632 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7633 IEM_MC_ADVANCE_RIP();
7634 IEM_MC_END();
7635 return VINF_SUCCESS;
7636
7637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7638 }
7639 }
7640 else
7641 {
7642 /*
7643 * We're loading a register from memory.
7644 */
7645 switch (pVCpu->iem.s.enmEffOpSize)
7646 {
7647 case IEMMODE_16BIT:
7648 IEM_MC_BEGIN(0, 2);
7649 IEM_MC_LOCAL(uint16_t, u16Value);
7650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7653 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7655 IEM_MC_ADVANCE_RIP();
7656 IEM_MC_END();
7657 return VINF_SUCCESS;
7658
7659 case IEMMODE_32BIT:
7660 IEM_MC_BEGIN(0, 2);
7661 IEM_MC_LOCAL(uint32_t, u32Value);
7662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7665 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7666 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7667 IEM_MC_ADVANCE_RIP();
7668 IEM_MC_END();
7669 return VINF_SUCCESS;
7670
7671 case IEMMODE_64BIT:
7672 IEM_MC_BEGIN(0, 2);
7673 IEM_MC_LOCAL(uint64_t, u64Value);
7674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7677 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7678 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7679 IEM_MC_ADVANCE_RIP();
7680 IEM_MC_END();
7681 return VINF_SUCCESS;
7682
7683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7684 }
7685 }
7686}
7687
7688
7689/** Opcode 0x0f 0xb7. */
7690FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7691{
7692 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7693 IEMOP_HLP_MIN_386();
7694
7695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7696
7697 /** @todo Not entirely sure how the operand size prefix is handled here,
7698 * assuming that it will be ignored. Would be nice to have a few
7699 * tests for this. */
7700 /*
7701 * If rm is denoting a register, no more instruction bytes.
7702 */
7703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7704 {
7705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7706 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7707 {
7708 IEM_MC_BEGIN(0, 1);
7709 IEM_MC_LOCAL(uint32_t, u32Value);
7710 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7711 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7712 IEM_MC_ADVANCE_RIP();
7713 IEM_MC_END();
7714 }
7715 else
7716 {
7717 IEM_MC_BEGIN(0, 1);
7718 IEM_MC_LOCAL(uint64_t, u64Value);
7719 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7720 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7721 IEM_MC_ADVANCE_RIP();
7722 IEM_MC_END();
7723 }
7724 }
7725 else
7726 {
7727 /*
7728 * We're loading a register from memory.
7729 */
7730 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7731 {
7732 IEM_MC_BEGIN(0, 2);
7733 IEM_MC_LOCAL(uint32_t, u32Value);
7734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7738 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7739 IEM_MC_ADVANCE_RIP();
7740 IEM_MC_END();
7741 }
7742 else
7743 {
7744 IEM_MC_BEGIN(0, 2);
7745 IEM_MC_LOCAL(uint64_t, u64Value);
7746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7749 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7750 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7751 IEM_MC_ADVANCE_RIP();
7752 IEM_MC_END();
7753 }
7754 }
7755 return VINF_SUCCESS;
7756}
7757
7758
7759/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7760FNIEMOP_UD_STUB(iemOp_jmpe);
7761/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7762FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7763
7764
7765/**
7766 * @opcode 0xb9
7767 * @opinvalid intel-modrm
7768 * @optest ->
7769 */
7770FNIEMOP_DEF(iemOp_Grp10)
7771{
7772 /*
7773 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7774 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7775 */
7776 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7777 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7778 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7779}
7780
7781
7782/** Opcode 0x0f 0xba. */
7783FNIEMOP_DEF(iemOp_Grp8)
7784{
7785 IEMOP_HLP_MIN_386();
7786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7787 PCIEMOPBINSIZES pImpl;
7788 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7789 {
7790 case 0: case 1: case 2: case 3:
7791 /* Both AMD and Intel want full modr/m decoding and imm8. */
7792 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7793 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7794 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7795 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7796 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7798 }
7799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7800
7801 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7802 {
7803 /* register destination. */
7804 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7806
7807 switch (pVCpu->iem.s.enmEffOpSize)
7808 {
7809 case IEMMODE_16BIT:
7810 IEM_MC_BEGIN(3, 0);
7811 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7812 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7813 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7814
7815 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7816 IEM_MC_REF_EFLAGS(pEFlags);
7817 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7818
7819 IEM_MC_ADVANCE_RIP();
7820 IEM_MC_END();
7821 return VINF_SUCCESS;
7822
7823 case IEMMODE_32BIT:
7824 IEM_MC_BEGIN(3, 0);
7825 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7826 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7827 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7828
7829 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7830 IEM_MC_REF_EFLAGS(pEFlags);
7831 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7832
7833 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7834 IEM_MC_ADVANCE_RIP();
7835 IEM_MC_END();
7836 return VINF_SUCCESS;
7837
7838 case IEMMODE_64BIT:
7839 IEM_MC_BEGIN(3, 0);
7840 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7841 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7842 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7843
7844 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7845 IEM_MC_REF_EFLAGS(pEFlags);
7846 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7847
7848 IEM_MC_ADVANCE_RIP();
7849 IEM_MC_END();
7850 return VINF_SUCCESS;
7851
7852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7853 }
7854 }
7855 else
7856 {
7857 /* memory destination. */
7858
7859 uint32_t fAccess;
7860 if (pImpl->pfnLockedU16)
7861 fAccess = IEM_ACCESS_DATA_RW;
7862 else /* BT */
7863 fAccess = IEM_ACCESS_DATA_R;
7864
7865 /** @todo test negative bit offsets! */
7866 switch (pVCpu->iem.s.enmEffOpSize)
7867 {
7868 case IEMMODE_16BIT:
7869 IEM_MC_BEGIN(3, 1);
7870 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7871 IEM_MC_ARG(uint16_t, u16Src, 1);
7872 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7874
7875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7876 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7877 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7878 if (pImpl->pfnLockedU16)
7879 IEMOP_HLP_DONE_DECODING();
7880 else
7881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7882 IEM_MC_FETCH_EFLAGS(EFlags);
7883 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7884 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7885 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7886 else
7887 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7888 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7889
7890 IEM_MC_COMMIT_EFLAGS(EFlags);
7891 IEM_MC_ADVANCE_RIP();
7892 IEM_MC_END();
7893 return VINF_SUCCESS;
7894
7895 case IEMMODE_32BIT:
7896 IEM_MC_BEGIN(3, 1);
7897 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7898 IEM_MC_ARG(uint32_t, u32Src, 1);
7899 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7901
7902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7903 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7904 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7905 if (pImpl->pfnLockedU16)
7906 IEMOP_HLP_DONE_DECODING();
7907 else
7908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7909 IEM_MC_FETCH_EFLAGS(EFlags);
7910 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7912 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7913 else
7914 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7915 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7916
7917 IEM_MC_COMMIT_EFLAGS(EFlags);
7918 IEM_MC_ADVANCE_RIP();
7919 IEM_MC_END();
7920 return VINF_SUCCESS;
7921
7922 case IEMMODE_64BIT:
7923 IEM_MC_BEGIN(3, 1);
7924 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7925 IEM_MC_ARG(uint64_t, u64Src, 1);
7926 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7928
7929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7930 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7931 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7932 if (pImpl->pfnLockedU16)
7933 IEMOP_HLP_DONE_DECODING();
7934 else
7935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7936 IEM_MC_FETCH_EFLAGS(EFlags);
7937 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7940 else
7941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7943
7944 IEM_MC_COMMIT_EFLAGS(EFlags);
7945 IEM_MC_ADVANCE_RIP();
7946 IEM_MC_END();
7947 return VINF_SUCCESS;
7948
7949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7950 }
7951 }
7952}
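
/* For the register forms of bt/bts/btr/btc above, the immediate bit offset
 * wraps at the operand width; that is what the u8Bit masking in the
 * IEM_MC_ARG_CONST lines encodes. Worked examples:
 * @code
 * // bt  ax, 21  -> 21 & 0x0f == 5: bit 5 of AX is tested
 * // bt eax, 37  -> 37 & 0x1f == 5: bit 5 of EAX
 * // bt rax, 69  -> 69 & 0x3f == 5: bit 5 of RAX
 * @endcode
 * The memory forms mask the immediate the same way, so unlike the bt Ev,Gv
 * register-offset forms the imm8 variants never displace the effective
 * address.
 */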
7953
7954
7955/** Opcode 0x0f 0xbb. */
7956FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7957{
7958 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7959 IEMOP_HLP_MIN_386();
7960 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7961}
7962
7963
7964/** Opcode 0x0f 0xbc. */
7965FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7966{
7967 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7968 IEMOP_HLP_MIN_386();
7969 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7970 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7971}
7972
7973
7974/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7975FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7976
7977
7978/** Opcode 0x0f 0xbd. */
7979FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7980{
7981 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7982 IEMOP_HLP_MIN_386();
7983 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7984 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7985}
7986
7987
7988/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7989FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7990
7991
7992/** Opcode 0x0f 0xbe. */
7993FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7994{
7995 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7996 IEMOP_HLP_MIN_386();
7997
7998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7999
8000 /*
8001 * If rm is denoting a register, no more instruction bytes.
8002 */
8003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8004 {
8005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8006 switch (pVCpu->iem.s.enmEffOpSize)
8007 {
8008 case IEMMODE_16BIT:
8009 IEM_MC_BEGIN(0, 1);
8010 IEM_MC_LOCAL(uint16_t, u16Value);
8011 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8012 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8013 IEM_MC_ADVANCE_RIP();
8014 IEM_MC_END();
8015 return VINF_SUCCESS;
8016
8017 case IEMMODE_32BIT:
8018 IEM_MC_BEGIN(0, 1);
8019 IEM_MC_LOCAL(uint32_t, u32Value);
8020 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8021 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8022 IEM_MC_ADVANCE_RIP();
8023 IEM_MC_END();
8024 return VINF_SUCCESS;
8025
8026 case IEMMODE_64BIT:
8027 IEM_MC_BEGIN(0, 1);
8028 IEM_MC_LOCAL(uint64_t, u64Value);
8029 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8030 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8031 IEM_MC_ADVANCE_RIP();
8032 IEM_MC_END();
8033 return VINF_SUCCESS;
8034
8035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8036 }
8037 }
8038 else
8039 {
8040 /*
8041 * We're loading a register from memory.
8042 */
8043 switch (pVCpu->iem.s.enmEffOpSize)
8044 {
8045 case IEMMODE_16BIT:
8046 IEM_MC_BEGIN(0, 2);
8047 IEM_MC_LOCAL(uint16_t, u16Value);
8048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8051 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8052 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8053 IEM_MC_ADVANCE_RIP();
8054 IEM_MC_END();
8055 return VINF_SUCCESS;
8056
8057 case IEMMODE_32BIT:
8058 IEM_MC_BEGIN(0, 2);
8059 IEM_MC_LOCAL(uint32_t, u32Value);
8060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8063 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8064 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8065 IEM_MC_ADVANCE_RIP();
8066 IEM_MC_END();
8067 return VINF_SUCCESS;
8068
8069 case IEMMODE_64BIT:
8070 IEM_MC_BEGIN(0, 2);
8071 IEM_MC_LOCAL(uint64_t, u64Value);
8072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8075 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8076 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8077 IEM_MC_ADVANCE_RIP();
8078 IEM_MC_END();
8079 return VINF_SUCCESS;
8080
8081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8082 }
8083 }
8084}
8085
8086
8087/** Opcode 0x0f 0xbf. */
8088FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8089{
8090 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8091 IEMOP_HLP_MIN_386();
8092
8093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8094
8095 /** @todo Not entirely sure how the operand size prefix is handled here,
8096 * assuming that it will be ignored. Would be nice to have a few
8097 * tests for this. */
8098 /*
8099 * If rm is denoting a register, no more instruction bytes.
8100 */
8101 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8102 {
8103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8104 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8105 {
8106 IEM_MC_BEGIN(0, 1);
8107 IEM_MC_LOCAL(uint32_t, u32Value);
8108 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8109 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8110 IEM_MC_ADVANCE_RIP();
8111 IEM_MC_END();
8112 }
8113 else
8114 {
8115 IEM_MC_BEGIN(0, 1);
8116 IEM_MC_LOCAL(uint64_t, u64Value);
8117 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8118 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8119 IEM_MC_ADVANCE_RIP();
8120 IEM_MC_END();
8121 }
8122 }
8123 else
8124 {
8125 /*
8126 * We're loading a register from memory.
8127 */
8128 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8129 {
8130 IEM_MC_BEGIN(0, 2);
8131 IEM_MC_LOCAL(uint32_t, u32Value);
8132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8135 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8136 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8137 IEM_MC_ADVANCE_RIP();
8138 IEM_MC_END();
8139 }
8140 else
8141 {
8142 IEM_MC_BEGIN(0, 2);
8143 IEM_MC_LOCAL(uint64_t, u64Value);
8144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8147 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8148 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8149 IEM_MC_ADVANCE_RIP();
8150 IEM_MC_END();
8151 }
8152 }
8153 return VINF_SUCCESS;
8154}
8155
8156
8157/** Opcode 0x0f 0xc0. */
8158FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8159{
8160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8161 IEMOP_HLP_MIN_486();
8162 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8163
8164 /*
8165 * If rm is denoting a register, no more instruction bytes.
8166 */
8167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8168 {
8169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8170
8171 IEM_MC_BEGIN(3, 0);
8172 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8173 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8175
8176 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8177 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8178 IEM_MC_REF_EFLAGS(pEFlags);
8179 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8180
8181 IEM_MC_ADVANCE_RIP();
8182 IEM_MC_END();
8183 }
8184 else
8185 {
8186 /*
8187 * We're accessing memory.
8188 */
8189 IEM_MC_BEGIN(3, 3);
8190 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8191 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8192 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8193 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8195
8196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8197 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8198 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8199 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8200 IEM_MC_FETCH_EFLAGS(EFlags);
8201 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8202 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8203 else
8204 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8205
8206 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8207 IEM_MC_COMMIT_EFLAGS(EFlags);
8208 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8209 IEM_MC_ADVANCE_RIP();
8210 IEM_MC_END();
8211 return VINF_SUCCESS;
8212 }
8213 return VINF_SUCCESS;
8214}
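
/* The exchange-and-add the xadd workers perform, as a stand-alone sketch
 * (made-up name; flag updates omitted):
 * @code
 * #include <stdint.h>
 * static void sketchXaddU8(uint8_t *puDst, uint8_t *puReg)
 * {
 *     uint8_t const uOld = *puDst; // remember the original destination
 *     *puDst = uOld + *puReg;      // the destination becomes the sum
 *     *puReg = uOld;               // the source register receives the old destination
 * }
 * @endcode
 * In the memory form above the worker updates the local u8RegCopy, which is
 * only written back to the guest register after the memory operand has been
 * committed and unmapped.
 */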
8215
8216
8217/** Opcode 0x0f 0xc1. */
8218FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8219{
8220 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8221 IEMOP_HLP_MIN_486();
8222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8223
8224 /*
8225 * If rm is denoting a register, no more instruction bytes.
8226 */
8227 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8228 {
8229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8230
8231 switch (pVCpu->iem.s.enmEffOpSize)
8232 {
8233 case IEMMODE_16BIT:
8234 IEM_MC_BEGIN(3, 0);
8235 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8236 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8237 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8238
8239 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8240 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8241 IEM_MC_REF_EFLAGS(pEFlags);
8242 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8243
8244 IEM_MC_ADVANCE_RIP();
8245 IEM_MC_END();
8246 return VINF_SUCCESS;
8247
8248 case IEMMODE_32BIT:
8249 IEM_MC_BEGIN(3, 0);
8250 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8251 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8252 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8253
8254 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8255 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8256 IEM_MC_REF_EFLAGS(pEFlags);
8257 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8258
8259 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8260 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8261 IEM_MC_ADVANCE_RIP();
8262 IEM_MC_END();
8263 return VINF_SUCCESS;
8264
8265 case IEMMODE_64BIT:
8266 IEM_MC_BEGIN(3, 0);
8267 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8268 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8269 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8270
8271 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8272 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8273 IEM_MC_REF_EFLAGS(pEFlags);
8274 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8275
8276 IEM_MC_ADVANCE_RIP();
8277 IEM_MC_END();
8278 return VINF_SUCCESS;
8279
8280 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8281 }
8282 }
8283 else
8284 {
8285 /*
8286 * We're accessing memory.
8287 */
8288 switch (pVCpu->iem.s.enmEffOpSize)
8289 {
8290 case IEMMODE_16BIT:
8291 IEM_MC_BEGIN(3, 3);
8292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8293 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8294 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8295 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8297
8298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8299 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8300 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8301 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8302 IEM_MC_FETCH_EFLAGS(EFlags);
8303 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8304 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8305 else
8306 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8307
8308 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8309 IEM_MC_COMMIT_EFLAGS(EFlags);
8310 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8311 IEM_MC_ADVANCE_RIP();
8312 IEM_MC_END();
8313 return VINF_SUCCESS;
8314
8315 case IEMMODE_32BIT:
8316 IEM_MC_BEGIN(3, 3);
8317 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8318 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8319 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8320 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8322
8323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8324 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8325 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8326 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8327 IEM_MC_FETCH_EFLAGS(EFlags);
8328 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8329 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8330 else
8331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8332
8333 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8334 IEM_MC_COMMIT_EFLAGS(EFlags);
8335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8336 IEM_MC_ADVANCE_RIP();
8337 IEM_MC_END();
8338 return VINF_SUCCESS;
8339
8340 case IEMMODE_64BIT:
8341 IEM_MC_BEGIN(3, 3);
8342 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8343 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8344 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8345 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8347
8348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8349 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8350 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8351 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8352 IEM_MC_FETCH_EFLAGS(EFlags);
8353 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8354 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8355 else
8356 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8357
8358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8359 IEM_MC_COMMIT_EFLAGS(EFlags);
8360 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8361 IEM_MC_ADVANCE_RIP();
8362 IEM_MC_END();
8363 return VINF_SUCCESS;
8364
8365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8366 }
8367 }
8368}
8369
8370
8371/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8372FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8373/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8374FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8375/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8376FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8377/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8378FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8379
8380
8381/** Opcode 0x0f 0xc3. */
8382FNIEMOP_DEF(iemOp_movnti_My_Gy)
8383{
8384 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8385
8386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8387
8388 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8389 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8390 {
8391 switch (pVCpu->iem.s.enmEffOpSize)
8392 {
8393 case IEMMODE_32BIT:
8394 IEM_MC_BEGIN(0, 2);
8395 IEM_MC_LOCAL(uint32_t, u32Value);
8396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8397
8398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8400 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8401 return IEMOP_RAISE_INVALID_OPCODE();
8402
8403 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8404 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8405 IEM_MC_ADVANCE_RIP();
8406 IEM_MC_END();
8407 break;
8408
8409 case IEMMODE_64BIT:
8410 IEM_MC_BEGIN(0, 2);
8411 IEM_MC_LOCAL(uint64_t, u64Value);
8412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8413
8414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8417 return IEMOP_RAISE_INVALID_OPCODE();
8418
8419 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8420 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8421 IEM_MC_ADVANCE_RIP();
8422 IEM_MC_END();
8423 break;
8424
8425 case IEMMODE_16BIT:
8426 /** @todo check this form. */
8427 return IEMOP_RAISE_INVALID_OPCODE();
8428 }
8429 }
8430 else
8431 return IEMOP_RAISE_INVALID_OPCODE();
8432 return VINF_SUCCESS;
8433}
8434/* Opcode 0x66 0x0f 0xc3 - invalid */
8435/* Opcode 0xf3 0x0f 0xc3 - invalid */
8436/* Opcode 0xf2 0x0f 0xc3 - invalid */
8437
8438/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8439FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8440/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8441FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8442/* Opcode 0xf3 0x0f 0xc4 - invalid */
8443/* Opcode 0xf2 0x0f 0xc4 - invalid */
8444
8445/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8446FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8447/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8448FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8449/* Opcode 0xf3 0x0f 0xc5 - invalid */
8450/* Opcode 0xf2 0x0f 0xc5 - invalid */
8451
8452/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8453FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8454/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8455FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8456/* Opcode 0xf3 0x0f 0xc6 - invalid */
8457/* Opcode 0xf2 0x0f 0xc6 - invalid */
8458
8459
8460/** Opcode 0x0f 0xc7 !11/1. */
8461FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8462{
8463 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8464
8465 IEM_MC_BEGIN(4, 3);
8466 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8467 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8468 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8469 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8470 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8471 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8473
8474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8475 IEMOP_HLP_DONE_DECODING();
8476 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8477
8478 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8479 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8480 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8481
8482 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8483 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8484 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8485
8486 IEM_MC_FETCH_EFLAGS(EFlags);
8487 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8489 else
8490 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8491
8492 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8493 IEM_MC_COMMIT_EFLAGS(EFlags);
8494 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8495 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8496 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8497 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8498 IEM_MC_ENDIF();
8499 IEM_MC_ADVANCE_RIP();
8500
8501 IEM_MC_END();
8502 return VINF_SUCCESS;
8503}
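
/* The compare/exchange performed by the cmpxchg8b workers, as a hedged
 * stand-alone sketch (made-up name; locking and flags other than ZF omitted):
 * @code
 * #include <stdint.h>
 * static void sketchCmpXchg8b(uint64_t *puDst, uint64_t *puEdxEax, uint64_t uEcxEbx, int *pfZf)
 * {
 *     if (*puDst == *puEdxEax) { *pfZf = 1; *puDst    = uEcxEbx; } // match: store ECX:EBX
 *     else                     { *pfZf = 0; *puEdxEax = *puDst;  } // miss: load into EDX:EAX
 * }
 * @endcode
 * Hence the IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) block above, which writes
 * EAX/EDX back only when the comparison failed.
 */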
8504
8505
8506/** Opcode REX.W 0x0f 0xc7 !11/1. */
8507FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8508{
8509 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8510 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8511 {
8512#if 0
8513 RT_NOREF(bRm);
8514 IEMOP_BITCH_ABOUT_STUB();
8515 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8516#else
8517 IEM_MC_BEGIN(4, 3);
8518 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8519 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8520 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8521 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8522 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8523 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8525
8526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8527 IEMOP_HLP_DONE_DECODING();
8528 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8529 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8530
8531 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8532 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8533 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8534
8535 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8536 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8537 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8538
8539 IEM_MC_FETCH_EFLAGS(EFlags);
8540# ifdef RT_ARCH_AMD64
8541 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8542 {
8543 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8544 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8545 else
8546 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8547 }
8548 else
8549# endif
8550 {
8551 /* Note! The fallback for 32-bit hosts and hosts without CX16 does multiple
8552 accesses that are not all atomic, which works fine in a uni-CPU guest
8553 configuration (ignoring DMA). If guest SMP is active we have no choice
8554 but to use a rendezvous callback here. Sigh. */
8555 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8556 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8557 else
8558 {
8559 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8560 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8561 }
8562 }
8563
8564 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8565 IEM_MC_COMMIT_EFLAGS(EFlags);
8566 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8567 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8568 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8569 IEM_MC_ENDIF();
8570 IEM_MC_ADVANCE_RIP();
8571
8572 IEM_MC_END();
8573 return VINF_SUCCESS;
8574#endif
8575 }
8576 Log(("cmpxchg16b -> #UD\n"));
8577 return IEMOP_RAISE_INVALID_OPCODE();
8578}
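
/* Unlike cmpxchg8b, cmpxchg16b raises #GP(0) on a misaligned operand; that
 * is what IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16) above
 * encodes. As a plain-C sketch of the test:
 * @code
 * // A 16-byte aligned effective address has its low four bits clear:
 * //     fMisaligned = (GCPtrEffDst & 15) != 0; // -> raise #GP(0) when set
 * @endcode
 */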
8579
8580FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8581{
8582 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8583 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8584 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8585}
8586
8587/** Opcode 0x0f 0xc7 11/6. */
8588FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8589
8590/** Opcode 0x0f 0xc7 !11/6. */
8591#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8592FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8593{
8594 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8595 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVInstrDiag_Vmptrld);
8596 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVInstrDiag_Vmptrld);
8597 IEM_MC_BEGIN(2, 0);
8598 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8599 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8601 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8602 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8603 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8604 IEM_MC_END();
8605 return VINF_SUCCESS;
8606}
8607#else
8608FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8609#endif
8610
8611/** Opcode 0x66 0x0f 0xc7 !11/6. */
8612#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8613FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8614{
8615 IEMOP_MNEMONIC(vmclear, "vmclear");
8616 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVInstrDiag_Vmclear);
8617 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVInstrDiag_Vmclear);
8618 IEM_MC_BEGIN(2, 0);
8619 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8620 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8622 IEMOP_HLP_DONE_DECODING();
8623 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8624 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8625 IEM_MC_END();
8626 return VINF_SUCCESS;
8627}
8628#else
8629FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8630#endif
8631
8632/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8633#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8634FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8635{
8636 IEMOP_MNEMONIC(vmxon, "vmxon");
8637 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVInstrDiag_Vmxon);
8638 IEM_MC_BEGIN(2, 0);
8639 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8640 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8642 IEMOP_HLP_DONE_DECODING();
8643 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8644 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8645 IEM_MC_END();
8646 return VINF_SUCCESS;
8647}
8648#else
8649FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8650#endif
8651
8652/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8653#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8654FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8655{
8656 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8657 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVInstrDiag_Vmptrst);
8658 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVInstrDiag_Vmptrst);
8659 IEM_MC_BEGIN(2, 0);
8660 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8661 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8663 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8664 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8665 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8666 IEM_MC_END();
8667 return VINF_SUCCESS;
8668}
8669#else
8670FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8671#endif
8672
8673/** Opcode 0x0f 0xc7 11/7. */
8674FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8675
8676
8677/**
8678 * Group 9 jump table for register variant.
8679 */
8680IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8681{ /* pfx: none, 066h, 0f3h, 0f2h */
8682 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8683 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8684 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8685 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8686 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8687 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8688 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8689 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8690};
8691AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8692
8693
8694/**
8695 * Group 9 jump table for memory variant.
8696 */
8697IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8698{ /* pfx: none, 066h, 0f3h, 0f2h */
8699 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8700 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8701 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8702 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8703 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8704 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8705 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8706 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8707};
8708AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8709
8710
8711/** Opcode 0x0f 0xc7. */
8712FNIEMOP_DEF(iemOp_Grp9)
8713{
8714 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8716 /* register, register */
8717 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8718 + pVCpu->iem.s.idxPrefix], bRm);
8719 /* memory, register */
8720 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8721 + pVCpu->iem.s.idxPrefix], bRm);
8722}
8723
8724
8725/**
8726 * Common 'bswap register' helper.
8727 */
8728FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8729{
8730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8731 switch (pVCpu->iem.s.enmEffOpSize)
8732 {
8733 case IEMMODE_16BIT:
8734 IEM_MC_BEGIN(1, 0);
8735 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8736 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8737 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8738 IEM_MC_ADVANCE_RIP();
8739 IEM_MC_END();
8740 return VINF_SUCCESS;
8741
8742 case IEMMODE_32BIT:
8743 IEM_MC_BEGIN(1, 0);
8744 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8745 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8746 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8747 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8748 IEM_MC_ADVANCE_RIP();
8749 IEM_MC_END();
8750 return VINF_SUCCESS;
8751
8752 case IEMMODE_64BIT:
8753 IEM_MC_BEGIN(1, 0);
8754 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8755 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8756 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8757 IEM_MC_ADVANCE_RIP();
8758 IEM_MC_END();
8759 return VINF_SUCCESS;
8760
8761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8762 }
8763}
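/**
 * Illustrative sketch (assumption): what the iemAImpl_bswap_u32/u64 assembly
 * workers compute.  The 16-bit operand form is architecturally undefined,
 * which is why it gets a dedicated iemAImpl_bswap_u16 worker operating on a
 * 32-bit register reference above.
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint32_t bswap32(uint32_t u)  // reverse the four bytes
 *  {
 *      return (u >> 24) | ((u >> 8) & UINT32_C(0xff00))
 *           | ((u << 8) & UINT32_C(0xff0000)) | (u << 24);
 *  }
 *
 *  static uint64_t bswap64(uint64_t u)  // reverse the eight bytes
 *  {
 *      return ((uint64_t)bswap32((uint32_t)u) << 32) | bswap32((uint32_t)(u >> 32));
 *  }
 * @endcode
 */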
8764
8765
8766/** Opcode 0x0f 0xc8. */
8767FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8768{
8769 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8770 /* Note! Intel manuals state that R8-R15 can be accessed by using a REX.X
8771 prefix, but it appears REX.B is the correct prefix. For a parallel
8772 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8773 IEMOP_HLP_MIN_486();
8774 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8775}
8776
8777
8778/** Opcode 0x0f 0xc9. */
8779FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8780{
8781 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8782 IEMOP_HLP_MIN_486();
8783 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8784}
8785
8786
8787/** Opcode 0x0f 0xca. */
8788FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8789{
8790 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8791 IEMOP_HLP_MIN_486();
8792 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8793}
8794
8795
8796/** Opcode 0x0f 0xcb. */
8797FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8798{
8799 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8800 IEMOP_HLP_MIN_486();
8801 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8802}
8803
8804
8805/** Opcode 0x0f 0xcc. */
8806FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8807{
8808 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8809 IEMOP_HLP_MIN_486();
8810 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8811}
8812
8813
8814/** Opcode 0x0f 0xcd. */
8815FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8816{
8817 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8818 IEMOP_HLP_MIN_486();
8819 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8820}
8821
8822
8823/** Opcode 0x0f 0xce. */
8824FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8825{
8826 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8827 IEMOP_HLP_MIN_486();
8828 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8829}
8830
8831
8832/** Opcode 0x0f 0xcf. */
8833FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8834{
8835 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8836 IEMOP_HLP_MIN_486();
8837 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8838}
8839
8840
8841/* Opcode 0x0f 0xd0 - invalid */
8842/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8843FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8844/* Opcode 0xf3 0x0f 0xd0 - invalid */
8845/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8846FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8847
8848/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8849FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8850/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8851FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8852/* Opcode 0xf3 0x0f 0xd1 - invalid */
8853/* Opcode 0xf2 0x0f 0xd1 - invalid */
8854
8855/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8856FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8857/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8858FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8859/* Opcode 0xf3 0x0f 0xd2 - invalid */
8860/* Opcode 0xf2 0x0f 0xd2 - invalid */
8861
8862/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8863FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8864/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8865FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8866/* Opcode 0xf3 0x0f 0xd3 - invalid */
8867/* Opcode 0xf2 0x0f 0xd3 - invalid */
8868
8869/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8870FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8871/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8872FNIEMOP_STUB(iemOp_paddq_Vx_W);
8873/* Opcode 0xf3 0x0f 0xd4 - invalid */
8874/* Opcode 0xf2 0x0f 0xd4 - invalid */
8875
8876/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8877FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8878/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8879FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8880/* Opcode 0xf3 0x0f 0xd5 - invalid */
8881/* Opcode 0xf2 0x0f 0xd5 - invalid */
8882
8883/* Opcode 0x0f 0xd6 - invalid */
8884
8885/**
8886 * @opcode 0xd6
8887 * @oppfx 0x66
8888 * @opcpuid sse2
8889 * @opgroup og_sse2_pcksclr_datamove
8890 * @opxcpttype none
8891 * @optest op1=-1 op2=2 -> op1=2
8892 * @optest op1=0 op2=-42 -> op1=-42
8893 */
8894FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8895{
8896 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8899 {
8900 /*
8901 * Register, register.
8902 */
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_BEGIN(0, 2);
8905 IEM_MC_LOCAL(uint64_t, uSrc);
8906
8907 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8908 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8909
8910 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8911 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8912
8913 IEM_MC_ADVANCE_RIP();
8914 IEM_MC_END();
8915 }
8916 else
8917 {
8918 /*
8919 * Memory, register.
8920 */
8921 IEM_MC_BEGIN(0, 2);
8922 IEM_MC_LOCAL(uint64_t, uSrc);
8923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8924
8925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8928 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8929
8930 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8931 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8932
8933 IEM_MC_ADVANCE_RIP();
8934 IEM_MC_END();
8935 }
8936 return VINF_SUCCESS;
8937}
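/**
 * Illustrative sketch (assumption): the register form above copies the low
 * qword and zero-extends the destination XMM register to 128 bits, roughly:
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct { uint64_t au64[2]; } X128;  // stand-in for RTUINT128U
 *
 *  static void movq_WqZx_Vq(X128 *pDst, X128 const *pSrc)
 *  {
 *      pDst->au64[0] = pSrc->au64[0];  // low qword of the source
 *      pDst->au64[1] = 0;              // high qword is zeroed
 *  }
 * @endcode
 */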
8938
8939
8940/**
8941 * @opcode 0xd6
8942 * @opcodesub 11 mr/reg
8943 * @oppfx f3
8944 * @opcpuid sse2
8945 * @opgroup og_sse2_simdint_datamove
8946 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8947 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8948 */
8949FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8950{
8951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8952 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8953 {
8954 /*
8955 * Register, register.
8956 */
8957 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8959 IEM_MC_BEGIN(0, 1);
8960 IEM_MC_LOCAL(uint64_t, uSrc);
8961
8962 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8963 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8964
8965 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8966 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8967 IEM_MC_FPU_TO_MMX_MODE();
8968
8969 IEM_MC_ADVANCE_RIP();
8970 IEM_MC_END();
8971 return VINF_SUCCESS;
8972 }
8973
8974 /**
8975 * @opdone
8976 * @opmnemonic udf30fd6mem
8977 * @opcode 0xd6
8978 * @opcodesub !11 mr/reg
8979 * @oppfx f3
8980 * @opunused intel-modrm
8981 * @opcpuid sse
8982 * @optest ->
8983 */
8984 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8985}
8986
8987
8988/**
8989 * @opcode 0xd6
8990 * @opcodesub 11 mr/reg
8991 * @oppfx f2
8992 * @opcpuid sse2
8993 * @opgroup og_sse2_simdint_datamove
8994 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8995 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8996 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8997 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8998 * @optest op1=-42 op2=0xfedcba9876543210
8999 * -> op1=0xfedcba9876543210 ftw=0xff
9000 */
9001FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9002{
9003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9005 {
9006 /*
9007 * Register, register.
9008 */
9009 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9011 IEM_MC_BEGIN(0, 1);
9012 IEM_MC_LOCAL(uint64_t, uSrc);
9013
9014 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9015 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9016
9017 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9018 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9019 IEM_MC_FPU_TO_MMX_MODE();
9020
9021 IEM_MC_ADVANCE_RIP();
9022 IEM_MC_END();
9023 return VINF_SUCCESS;
9024 }
9025
9026 /**
9027 * @opdone
9028 * @opmnemonic udf20fd6mem
9029 * @opcode 0xd6
9030 * @opcodesub !11 mr/reg
9031 * @oppfx f2
9032 * @opunused intel-modrm
9033 * @opcpuid sse
9034 * @optest ->
9035 */
9036 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9037}
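/**
 * Illustrative sketch (assumption, hypothetical field names): both movq2dq
 * and movdq2q count as MMX instructions, so IEM_MC_FPU_TO_MMX_MODE() models
 * the architectural side effect of marking every x87 register valid and
 * resetting the top-of-stack -- hence the ftw=0xff expectations in the test
 * annotations above.  Conceptually, against an FXSAVE-style image:
 *
 * @code
 *  static void fpuToMmxMode(X86FXSTATE *pFpu)
 *  {
 *      pFpu->FTW  = 0xff;                 // abridged tag word: all registers valid
 *      pFpu->FSW &= ~X86_FSW_TOP_MASK;    // top-of-stack = 0
 *  }
 * @endcode
 */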
9038
9039/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9040FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9041{
9042 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9043 /** @todo testcase: Check that the instruction implicitly clears the high
9044 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9045 * and opcode modifications are made to work with the whole width (not
9046 * just 128). */
9047 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
9048 /* Docs say register only. */
9049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9051 {
9052 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9053 IEM_MC_BEGIN(2, 0);
9054 IEM_MC_ARG(uint64_t *, pDst, 0);
9055 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9056 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9057 IEM_MC_PREPARE_FPU_USAGE();
9058 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9059 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9060 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9061 IEM_MC_ADVANCE_RIP();
9062 IEM_MC_END();
9063 return VINF_SUCCESS;
9064 }
9065 return IEMOP_RAISE_INVALID_OPCODE();
9066}
9067
9068/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9069FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9070{
9071 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9072 /** @todo testcase: Check that the instruction implicitly clears the high
9073 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9074 * and opcode modifications are made to work with the whole width (not
9075 * just 128). */
9076 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
9077 /* Docs say register only. */
9078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9080 {
9081 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9082 IEM_MC_BEGIN(2, 0);
9083 IEM_MC_ARG(uint64_t *, pDst, 0);
9084 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9085 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9086 IEM_MC_PREPARE_SSE_USAGE();
9087 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9088 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9089 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9090 IEM_MC_ADVANCE_RIP();
9091 IEM_MC_END();
9092 return VINF_SUCCESS;
9093 }
9094 return IEMOP_RAISE_INVALID_OPCODE();
9095}
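/**
 * Illustrative sketch (assumption): both pmovmskb forms gather the most
 * significant bit of each source byte into the low bits of the destination
 * general register, i.e. something like:
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint32_t pmovmskb(uint8_t const *pabSrc, unsigned cbSrc)  // 8 = MMX, 16 = SSE
 *  {
 *      uint32_t fMask = 0;
 *      for (unsigned i = 0; i < cbSrc; i++)
 *          fMask |= (uint32_t)(pabSrc[i] >> 7) << i;  // bit i = sign bit of byte i
 *      return fMask;
 *  }
 * @endcode
 */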
9096
9097/* Opcode 0xf3 0x0f 0xd7 - invalid */
9098/* Opcode 0xf2 0x0f 0xd7 - invalid */
9099
9100
9101/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9102FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9103/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9104FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9105/* Opcode 0xf3 0x0f 0xd8 - invalid */
9106/* Opcode 0xf2 0x0f 0xd8 - invalid */
9107
9108/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9109FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9110/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9111FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9112/* Opcode 0xf3 0x0f 0xd9 - invalid */
9113/* Opcode 0xf2 0x0f 0xd9 - invalid */
9114
9115/** Opcode 0x0f 0xda - pminub Pq, Qq */
9116FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9117/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9118FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9119/* Opcode 0xf3 0x0f 0xda - invalid */
9120/* Opcode 0xf2 0x0f 0xda - invalid */
9121
9122/** Opcode 0x0f 0xdb - pand Pq, Qq */
9123FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9124/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9125FNIEMOP_STUB(iemOp_pand_Vx_W);
9126/* Opcode 0xf3 0x0f 0xdb - invalid */
9127/* Opcode 0xf2 0x0f 0xdb - invalid */
9128
9129/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9130FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9131/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9132FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9133/* Opcode 0xf3 0x0f 0xdc - invalid */
9134/* Opcode 0xf2 0x0f 0xdc - invalid */
9135
9136/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9137FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9138/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9139FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9140/* Opcode 0xf3 0x0f 0xdd - invalid */
9141/* Opcode 0xf2 0x0f 0xdd - invalid */
9142
9143/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9144FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9145/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9146FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9147/* Opcode 0xf3 0x0f 0xde - invalid */
9148/* Opcode 0xf2 0x0f 0xde - invalid */
9149
9150/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9151FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9152/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9153FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9154/* Opcode 0xf3 0x0f 0xdf - invalid */
9155/* Opcode 0xf2 0x0f 0xdf - invalid */
9156
9157/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9158FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9159/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9160FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9161/* Opcode 0xf3 0x0f 0xe0 - invalid */
9162/* Opcode 0xf2 0x0f 0xe0 - invalid */
9163
9164/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9165FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9166/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9167FNIEMOP_STUB(iemOp_psraw_Vx_W);
9168/* Opcode 0xf3 0x0f 0xe1 - invalid */
9169/* Opcode 0xf2 0x0f 0xe1 - invalid */
9170
9171/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9172FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9173/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9174FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9175/* Opcode 0xf3 0x0f 0xe2 - invalid */
9176/* Opcode 0xf2 0x0f 0xe2 - invalid */
9177
9178/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9179FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9180/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9181FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9182/* Opcode 0xf3 0x0f 0xe3 - invalid */
9183/* Opcode 0xf2 0x0f 0xe3 - invalid */
9184
9185/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9186FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9187/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9188FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9189/* Opcode 0xf3 0x0f 0xe4 - invalid */
9190/* Opcode 0xf2 0x0f 0xe4 - invalid */
9191
9192/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9193FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9194/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9195FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9196/* Opcode 0xf3 0x0f 0xe5 - invalid */
9197/* Opcode 0xf2 0x0f 0xe5 - invalid */
9198
9199/* Opcode 0x0f 0xe6 - invalid */
9200/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9201FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9202/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9203FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9204/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9205FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9206
9207
9208/**
9209 * @opcode 0xe7
9210 * @opcodesub !11 mr/reg
9211 * @oppfx none
9212 * @opcpuid sse
9213 * @opgroup og_sse1_cachect
9214 * @opxcpttype none
9215 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9216 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9217 */
9218FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9219{
9220 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9222 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9223 {
9224 /* Register, memory. */
9225 IEM_MC_BEGIN(0, 2);
9226 IEM_MC_LOCAL(uint64_t, uSrc);
9227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9228
9229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9231 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9232 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9233
9234 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9235 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9236 IEM_MC_FPU_TO_MMX_MODE();
9237
9238 IEM_MC_ADVANCE_RIP();
9239 IEM_MC_END();
9240 return VINF_SUCCESS;
9241 }
9242 /**
9243 * @opdone
9244 * @opmnemonic ud0fe7reg
9245 * @opcode 0xe7
9246 * @opcodesub 11 mr/reg
9247 * @oppfx none
9248 * @opunused immediate
9249 * @opcpuid sse
9250 * @optest ->
9251 */
9252 return IEMOP_RAISE_INVALID_OPCODE();
9253}
9254
9255/**
9256 * @opcode 0xe7
9257 * @opcodesub !11 mr/reg
9258 * @oppfx 0x66
9259 * @opcpuid sse2
9260 * @opgroup og_sse2_cachect
9261 * @opxcpttype 1
9262 * @optest op1=-1 op2=2 -> op1=2
9263 * @optest op1=0 op2=-42 -> op1=-42
9264 */
9265FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9266{
9267 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9269 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9270 {
9271 /* Register, memory. */
9272 IEM_MC_BEGIN(0, 2);
9273 IEM_MC_LOCAL(RTUINT128U, uSrc);
9274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9275
9276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9278 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9279 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9280
9281 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9282 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9283
9284 IEM_MC_ADVANCE_RIP();
9285 IEM_MC_END();
9286 return VINF_SUCCESS;
9287 }
9288
9289 /**
9290 * @opdone
9291 * @opmnemonic ud660fe7reg
9292 * @opcode 0xe7
9293 * @opcodesub 11 mr/reg
9294 * @oppfx 0x66
9295 * @opunused immediate
9296 * @opcpuid sse
9297 * @optest ->
9298 */
9299 return IEMOP_RAISE_INVALID_OPCODE();
9300}
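/**
 * Illustrative usage sketch (assumption): guests typically reach these
 * non-temporal stores through compiler intrinsics; the alignment requirement
 * is why the 66h form above uses IEM_MC_STORE_MEM_U128_ALIGN_SSE.
 *
 * @code
 *  #include <stddef.h>
 *  #include <emmintrin.h>  // SSE2 intrinsics
 *
 *  static void copyStream(__m128i *pDst, __m128i const *pSrc, size_t cBlocks)
 *  {
 *      for (size_t i = 0; i < cBlocks; i++)
 *          _mm_stream_si128(&pDst[i], _mm_load_si128(&pSrc[i]));  // movntdq store
 *      _mm_sfence();  // order the streamed stores before subsequent stores
 *  }
 * @endcode
 */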
9301
9302/* Opcode 0xf3 0x0f 0xe7 - invalid */
9303/* Opcode 0xf2 0x0f 0xe7 - invalid */
9304
9305
9306/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9307FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9308/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9309FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9310/* Opcode 0xf3 0x0f 0xe8 - invalid */
9311/* Opcode 0xf2 0x0f 0xe8 - invalid */
9312
9313/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9314FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9315/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9316FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9317/* Opcode 0xf3 0x0f 0xe9 - invalid */
9318/* Opcode 0xf2 0x0f 0xe9 - invalid */
9319
9320/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9321FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9322/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9323FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9324/* Opcode 0xf3 0x0f 0xea - invalid */
9325/* Opcode 0xf2 0x0f 0xea - invalid */
9326
9327/** Opcode 0x0f 0xeb - por Pq, Qq */
9328FNIEMOP_STUB(iemOp_por_Pq_Qq);
9329/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9330FNIEMOP_STUB(iemOp_por_Vx_W);
9331/* Opcode 0xf3 0x0f 0xeb - invalid */
9332/* Opcode 0xf2 0x0f 0xeb - invalid */
9333
9334/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9335FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9336/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9337FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9338/* Opcode 0xf3 0x0f 0xec - invalid */
9339/* Opcode 0xf2 0x0f 0xec - invalid */
9340
9341/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9342FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9343/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9344FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9345/* Opcode 0xf3 0x0f 0xed - invalid */
9346/* Opcode 0xf2 0x0f 0xed - invalid */
9347
9348/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9349FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9350/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9351FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9352/* Opcode 0xf3 0x0f 0xee - invalid */
9353/* Opcode 0xf2 0x0f 0xee - invalid */
9354
9355
9356/** Opcode 0x0f 0xef - pxor Pq, Qq */
9357FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9358{
9359 IEMOP_MNEMONIC(pxor, "pxor");
9360 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9361}
9362
9363/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9364FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9365{
9366 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9367 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9368}
9369
9370/* Opcode 0xf3 0x0f 0xef - invalid */
9371/* Opcode 0xf2 0x0f 0xef - invalid */
9372
9373/* Opcode 0x0f 0xf0 - invalid */
9374/* Opcode 0x66 0x0f 0xf0 - invalid */
9375/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9376FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9377
9378/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9379FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9380/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9381FNIEMOP_STUB(iemOp_psllw_Vx_W);
9382/* Opcode 0xf2 0x0f 0xf1 - invalid */
9383
9384/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9385FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9386/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9387FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9388/* Opcode 0xf2 0x0f 0xf2 - invalid */
9389
9390/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9391FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9392/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9393FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9394/* Opcode 0xf2 0x0f 0xf3 - invalid */
9395
9396/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9397FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9398/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9399FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9400/* Opcode 0xf2 0x0f 0xf4 - invalid */
9401
9402/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9403FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9404/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9405FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9406/* Opcode 0xf2 0x0f 0xf5 - invalid */
9407
9408/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9409FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9410/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9411FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9412/* Opcode 0xf2 0x0f 0xf6 - invalid */
9413
9414/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9415FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9416/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9417FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9418/* Opcode 0xf2 0x0f 0xf7 - invalid */
9419
9420/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9421FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9422/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9423FNIEMOP_STUB(iemOp_psubb_Vx_W);
9424/* Opcode 0xf2 0x0f 0xf8 - invalid */
9425
9426/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9427FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9428/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9429FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9430/* Opcode 0xf2 0x0f 0xf9 - invalid */
9431
9432/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9433FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9434/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9435FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9436/* Opcode 0xf2 0x0f 0xfa - invalid */
9437
9438/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9439FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9440/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9441FNIEMOP_STUB(iemOp_psubq_Vx_W);
9442/* Opcode 0xf2 0x0f 0xfb - invalid */
9443
9444/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9445FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9446/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9447FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9448/* Opcode 0xf2 0x0f 0xfc - invalid */
9449
9450/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9451FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9452/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9453FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9454/* Opcode 0xf2 0x0f 0xfd - invalid */
9455
9456/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9457FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9458/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9459FNIEMOP_STUB(iemOp_paddd_Vx_W);
9460/* Opcode 0xf2 0x0f 0xfe - invalid */
9461
9462
9463/** Opcode **** 0x0f 0xff - UD0 */
9464FNIEMOP_DEF(iemOp_ud0)
9465{
9466 IEMOP_MNEMONIC(ud0, "ud0");
9467 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9468 {
9469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9470#ifndef TST_IEM_CHECK_MC
9471 RTGCPTR GCPtrEff;
9472 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9473 if (rcStrict != VINF_SUCCESS)
9474 return rcStrict;
9475#endif
9476 IEMOP_HLP_DONE_DECODING();
9477 }
9478 return IEMOP_RAISE_INVALID_OPCODE();
9479}
9480
9481
9482
9483/**
9484 * Two byte opcode map, first byte 0x0f.
9485 *
9486 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9487 * check if it needs updating as well when making changes.
9488 */
9489IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9490{
9491 /* no prefix, 066h prefix f3h prefix, f2h prefix */
9492 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9493 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9494 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9495 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9496 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9497 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9498 /* 0x06 */ IEMOP_X4(iemOp_clts),
9499 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9500 /* 0x08 */ IEMOP_X4(iemOp_invd),
9501 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9502 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9503 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9504 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9505 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9506 /* 0x0e */ IEMOP_X4(iemOp_femms),
9507 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9508
9509 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9510 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9511 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9512 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9513 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9514 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9515 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9516 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9517 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9518 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9519 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9520 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9521 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9522 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9523 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9524 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9525
9526 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9527 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9528 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9529 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9530 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9531 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9532 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9533 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9534 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9535 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9536 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9537 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9538 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9539 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9540 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9541 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9542
9543 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9544 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9545 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9546 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9547 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9548 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9549 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9550 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9551 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9552 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9553 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9554 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9555 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9556 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9557 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9558 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9559
9560 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9561 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9562 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9563 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9564 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9565 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9566 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9567 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9568 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9569 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9570 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9571 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9572 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9573 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9574 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9575 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9576
9577 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9578 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9579 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9580 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9581 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9582 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9583 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9584 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9585 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9586 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9587 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9588 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9589 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9590 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9591 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9592 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9593
9594 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9595 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9596 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9597 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9598 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9599 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9600 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9601 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9602 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9603 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9604 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9605 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9606 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9607 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9608 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9609 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9610
9611 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9612 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9613 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9614 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9615 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9619
9620 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9621 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9622 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9623 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9624 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9625 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9626 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9627 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9628
9629 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9630 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9631 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9632 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9633 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9634 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9635 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9636 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9637 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9638 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9639 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9640 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9641 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9642 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9643 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9644 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9645
9646 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9647 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9648 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9649 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9650 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9651 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9652 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9653 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9654 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9655 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9656 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9657 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9658 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9659 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9660 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9661 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9662
9663 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9664 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9665 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9666 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9667 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9668 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9669 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9670 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9671 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9672 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9673 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9674 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9675 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9676 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9677 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9678 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9679
9680 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9681 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9682 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9683 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9684 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9685 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9686 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9687 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9688 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9689 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9690 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9691 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9692 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9693 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9694 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9695 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9696
9697 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9698 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9699 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9700 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9701 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9702 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9703 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9704 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9705 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9706 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9707 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9708 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9709 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9710 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9711 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9712 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9713
9714 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9715 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9716 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9717 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9718 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9719 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9720 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9721 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9722 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9723 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9724 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9725 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9726 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9727 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9728 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9729 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9730
9731 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9732 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9733 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9734 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9735 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9736 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9737 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9738 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9739 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9740 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9741 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9742 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9743 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9744 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9745 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9746 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9747
9748 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9749 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9750 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9751 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9752 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9753 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9754 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9755 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9756 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9757 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9758 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9759 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9760 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9761 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9762 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9763 /* 0xff */ IEMOP_X4(iemOp_ud0),
9764};
9765AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
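/**
 * Illustrative sketch (assumption): like the group tables above, this map is
 * laid out as 256 opcode rows x 4 prefix columns, so the two-byte escape
 * dispatcher indexes it roughly as:
 *
 * @code
 *  // return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
 * @endcode
 */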
9766
9767/** @} */
9768