VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@74251

Last change on this file since 74251 was 74155, checked in by vboxsync on 2018-09-09:

VMM: Nested VMX: bugref:9180 VMXVDIAG naming.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 342.4 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 74155 2018-09-09 12:37:26Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
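/* Annotation (not part of the original source): throughout this file a ModRM
   byte decomposes as mod = bRm[7:6], reg = bRm[5:3], rm = bRm[2:0].  The
   recurring test
       (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
   selects the register form (mod == 3); any other mod value encodes a memory
   operand whose effective address IEM_MC_CALC_RM_EFF_ADDR computes.  For
   example, bRm = 0xC1 gives mod = 3, reg = 0, rm = 1. */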
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
38 }
39
40 /* Ignore operand size here, memory refs are always 16-bit. */
41 IEM_MC_BEGIN(2, 0);
42 IEM_MC_ARG(uint16_t, iEffSeg, 0);
43 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
44 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
45 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
46 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
47 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
48 IEM_MC_END();
49 return VINF_SUCCESS;
50}
51
52
53/** Opcode 0x0f 0x00 /1. */
54FNIEMOPRM_DEF(iemOp_Grp6_str)
55{
56 IEMOP_MNEMONIC(str, "str Rv/Mw");
57 IEMOP_HLP_MIN_286();
58 IEMOP_HLP_NO_REAL_OR_V86_MODE();
59
60
61 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
62 {
63 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
64 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
65 }
66
67 /* Ignore operand size here, memory refs are always 16-bit. */
68 IEM_MC_BEGIN(2, 0);
69 IEM_MC_ARG(uint16_t, iEffSeg, 0);
70 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
71 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
72 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
73 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
74 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
75 IEM_MC_END();
76 return VINF_SUCCESS;
77}
78
79
80/** Opcode 0x0f 0x00 /2. */
81FNIEMOPRM_DEF(iemOp_Grp6_lldt)
82{
83 IEMOP_MNEMONIC(lldt, "lldt Ew");
84 IEMOP_HLP_MIN_286();
85 IEMOP_HLP_NO_REAL_OR_V86_MODE();
86
87 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
88 {
89 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
90 IEM_MC_BEGIN(1, 0);
91 IEM_MC_ARG(uint16_t, u16Sel, 0);
92 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
93 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
94 IEM_MC_END();
95 }
96 else
97 {
98 IEM_MC_BEGIN(1, 1);
99 IEM_MC_ARG(uint16_t, u16Sel, 0);
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
102 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
103 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
104 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
105 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
106 IEM_MC_END();
107 }
108 return VINF_SUCCESS;
109}
110
111
112/** Opcode 0x0f 0x00 /3. */
113FNIEMOPRM_DEF(iemOp_Grp6_ltr)
114{
115 IEMOP_MNEMONIC(ltr, "ltr Ew");
116 IEMOP_HLP_MIN_286();
117 IEMOP_HLP_NO_REAL_OR_V86_MODE();
118
119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
120 {
121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
122 IEM_MC_BEGIN(1, 0);
123 IEM_MC_ARG(uint16_t, u16Sel, 0);
124 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
125 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
126 IEM_MC_END();
127 }
128 else
129 {
130 IEM_MC_BEGIN(1, 1);
131 IEM_MC_ARG(uint16_t, u16Sel, 0);
132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
135 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
136 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
138 IEM_MC_END();
139 }
140 return VINF_SUCCESS;
141}
142
143
144/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
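/* Annotation: verr (/4) and verw (/5) share the worker above; the constant
   fWrite argument selects which access check iemCImpl_VerX performs, and the
   verdict is reported to the guest in EFLAGS.ZF. */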
174
175
176/** Opcode 0x0f 0x00 /4. */
177FNIEMOPRM_DEF(iemOp_Grp6_verr)
178{
179 IEMOP_MNEMONIC(verr, "verr Ew");
180 IEMOP_HLP_MIN_286();
181 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
182}
183
184
185/** Opcode 0x0f 0x00 /5. */
186FNIEMOPRM_DEF(iemOp_Grp6_verw)
187{
188 IEMOP_MNEMONIC(verw, "verw Ew");
189 IEMOP_HLP_MIN_286();
190 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
191}
192
193
194/**
195 * Group 6 jump table.
196 */
197IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
198{
199 iemOp_Grp6_sldt,
200 iemOp_Grp6_str,
201 iemOp_Grp6_lldt,
202 iemOp_Grp6_ltr,
203 iemOp_Grp6_verr,
204 iemOp_Grp6_verw,
205 iemOp_InvalidWithRM,
206 iemOp_InvalidWithRM
207};
208
209/** Opcode 0x0f 0x00. */
210FNIEMOP_DEF(iemOp_Grp6)
211{
212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
213 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
214}
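/* Annotation: the dispatcher above indexes g_apfnGroup6 with the ModRM reg
   field.  E.g. the byte sequence 0F 00 D8 yields bRm = 0xD8 with reg = 3, and
   so dispatches to iemOp_Grp6_ltr with the whole ModRM byte as argument. */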
215
216
217/** Opcode 0x0f 0x01 /0. */
218FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
219{
220 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_64BIT_OP_SIZE();
223 IEM_MC_BEGIN(2, 1);
224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
225 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
229 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
230 IEM_MC_END();
231 return VINF_SUCCESS;
232}
233
234
235/** Opcode 0x0f 0x01 /0. */
236FNIEMOP_DEF(iemOp_Grp7_vmcall)
237{
238 IEMOP_MNEMONIC(vmcall, "vmcall");
239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
240
241 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
242 want all hypercalls regardless of the instruction used; if a
243 hypercall isn't handled by GIM or HMSvm, an #UD is raised.
244 (NEM/win makes ASSUMPTIONS about this behavior.) */
245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
246}
247
248
249/** Opcode 0x0f 0x01 /0. */
250#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
251FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
252{
253 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
254 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
255 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
256 IEMOP_HLP_DONE_DECODING();
257 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
258}
259#else
260FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
261{
262 IEMOP_BITCH_ABOUT_STUB();
263 return IEMOP_RAISE_INVALID_OPCODE();
264}
265#endif
266
267
268/** Opcode 0x0f 0x01 /0. */
269#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
270FNIEMOP_DEF(iemOp_Grp7_vmresume)
271{
272 IEMOP_MNEMONIC(vmresume, "vmresume");
273 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
274 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
275 IEMOP_HLP_DONE_DECODING();
276 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
277}
278#else
279FNIEMOP_DEF(iemOp_Grp7_vmresume)
280{
281 IEMOP_BITCH_ABOUT_STUB();
282 return IEMOP_RAISE_INVALID_OPCODE();
283}
284#endif
285
286
287/** Opcode 0x0f 0x01 /0. */
288#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
289FNIEMOP_DEF(iemOp_Grp7_vmxoff)
290{
291 IEMOP_MNEMONIC(vmxoff, "vmxoff");
292 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
293 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
294 IEMOP_HLP_DONE_DECODING();
295 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
296}
297#else
298FNIEMOP_DEF(iemOp_Grp7_vmxoff)
299{
300 IEMOP_BITCH_ABOUT_STUB();
301 return IEMOP_RAISE_INVALID_OPCODE();
302}
303#endif
304
305
306/** Opcode 0x0f 0x01 /1. */
307FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
308{
309 IEMOP_MNEMONIC(sidt, "sidt Ms");
310 IEMOP_HLP_MIN_286();
311 IEMOP_HLP_64BIT_OP_SIZE();
312 IEM_MC_BEGIN(2, 1);
313 IEM_MC_ARG(uint8_t, iEffSeg, 0);
314 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
317 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
318 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
319 IEM_MC_END();
320 return VINF_SUCCESS;
321}
322
323
324/** Opcode 0x0f 0x01 /1. */
325FNIEMOP_DEF(iemOp_Grp7_monitor)
326{
327 IEMOP_MNEMONIC(monitor, "monitor");
328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
329 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
330}
331
332
333/** Opcode 0x0f 0x01 /1. */
334FNIEMOP_DEF(iemOp_Grp7_mwait)
335{
336 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
338 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
339}
340
341
342/** Opcode 0x0f 0x01 /2. */
343FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
344{
345 IEMOP_MNEMONIC(lgdt, "lgdt");
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(3, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
353 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
354 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
355 IEM_MC_END();
356 return VINF_SUCCESS;
357}
358
359
360/** Opcode 0x0f 0x01 0xd0. */
361FNIEMOP_DEF(iemOp_Grp7_xgetbv)
362{
363 IEMOP_MNEMONIC(xgetbv, "xgetbv");
364 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
365 {
366 /** @todo r=ramshankar: We should use
367 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
368 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
369 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
370 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
371 }
372 return IEMOP_RAISE_INVALID_OPCODE();
373}
374
375
376/** Opcode 0x0f 0x01 0xd1. */
377FNIEMOP_DEF(iemOp_Grp7_xsetbv)
378{
379 IEMOP_MNEMONIC(xsetbv, "xsetbv");
380 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
381 {
382 /** @todo r=ramshankar: We should use
383 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
384 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
385 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
386 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
387 }
388 return IEMOP_RAISE_INVALID_OPCODE();
389}
390
391
392/** Opcode 0x0f 0x01 /3. */
393FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
394{
395 IEMOP_MNEMONIC(lidt, "lidt");
396 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
397 ? IEMMODE_64BIT
398 : pVCpu->iem.s.enmEffOpSize;
399 IEM_MC_BEGIN(3, 1);
400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
401 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
406 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
407 IEM_MC_END();
408 return VINF_SUCCESS;
409}
410
411
412/** Opcode 0x0f 0x01 0xd8. */
413#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
414FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
415{
416 IEMOP_MNEMONIC(vmrun, "vmrun");
417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
418 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
419}
420#else
421FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
422#endif
423
424/** Opcode 0x0f 0x01 0xd9. */
425FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
426{
427 IEMOP_MNEMONIC(vmmcall, "vmmcall");
428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
429
430 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
431 want all hypercalls regardless of the instruction used; if a
432 hypercall isn't handled by GIM or HMSvm, an #UD is raised.
433 (NEM/win makes ASSUMPTIONS about this behavior.) */
434 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
435}
436
437/** Opcode 0x0f 0x01 0xda. */
438#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
439FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
440{
441 IEMOP_MNEMONIC(vmload, "vmload");
442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
443 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
444}
445#else
446FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
447#endif
448
449
450/** Opcode 0x0f 0x01 0xdb. */
451#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
452FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
453{
454 IEMOP_MNEMONIC(vmsave, "vmsave");
455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
456 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
457}
458#else
459FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
460#endif
461
462
463/** Opcode 0x0f 0x01 0xdc. */
464#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
465FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
466{
467 IEMOP_MNEMONIC(stgi, "stgi");
468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
470}
471#else
472FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
473#endif
474
475
476/** Opcode 0x0f 0x01 0xdd. */
477#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
478FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
479{
480 IEMOP_MNEMONIC(clgi, "clgi");
481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
482 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
483}
484#else
485FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
486#endif
487
488
489/** Opcode 0x0f 0x01 0xdf. */
490#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
491FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
492{
493 IEMOP_MNEMONIC(invlpga, "invlpga");
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
495 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
496}
497#else
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
499#endif
500
501
502/** Opcode 0x0f 0x01 0xde. */
503#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
504FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
505{
506 IEMOP_MNEMONIC(skinit, "skinit");
507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
508 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
509}
510#else
511FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
512#endif
513
514
515/** Opcode 0x0f 0x01 /4. */
516FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
517{
518 IEMOP_MNEMONIC(smsw, "smsw");
519 IEMOP_HLP_MIN_286();
520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
521 {
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
523 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
524 }
525
526 /* Ignore operand size here, memory refs are always 16-bit. */
527 IEM_MC_BEGIN(2, 0);
528 IEM_MC_ARG(uint16_t, iEffSeg, 0);
529 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
532 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
533 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
534 IEM_MC_END();
535 return VINF_SUCCESS;
536}
537
538
539/** Opcode 0x0f 0x01 /6. */
540FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
541{
542 /* The operand size is effectively ignored, all is 16-bit and only the
543 lower 4 bits (PE, MP, EM, TS) are used. */
544 IEMOP_MNEMONIC(lmsw, "lmsw");
545 IEMOP_HLP_MIN_286();
546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
547 {
548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
549 IEM_MC_BEGIN(1, 0);
550 IEM_MC_ARG(uint16_t, u16Tmp, 0);
551 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
552 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
553 IEM_MC_END();
554 }
555 else
556 {
557 IEM_MC_BEGIN(1, 1);
558 IEM_MC_ARG(uint16_t, u16Tmp, 0);
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
562 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
563 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
564 IEM_MC_END();
565 }
566 return VINF_SUCCESS;
567}
568
569
570/** Opcode 0x0f 0x01 /7. */
571FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
572{
573 IEMOP_MNEMONIC(invlpg, "invlpg");
574 IEMOP_HLP_MIN_486();
575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
576 IEM_MC_BEGIN(1, 1);
577 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
579 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
580 IEM_MC_END();
581 return VINF_SUCCESS;
582}
583
584
585/** Opcode 0x0f 0x01 /7. */
586FNIEMOP_DEF(iemOp_Grp7_swapgs)
587{
588 IEMOP_MNEMONIC(swapgs, "swapgs");
589 IEMOP_HLP_ONLY_64BIT();
590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
591 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
592}
593
594
595/** Opcode 0x0f 0x01 /7. */
596FNIEMOP_DEF(iemOp_Grp7_rdtscp)
597{
598 IEMOP_MNEMONIC(rdtscp, "rdtscp");
599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
600 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
601}
602
603
604/**
605 * Group 7 jump table, memory variant.
606 */
607IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
608{
609 iemOp_Grp7_sgdt,
610 iemOp_Grp7_sidt,
611 iemOp_Grp7_lgdt,
612 iemOp_Grp7_lidt,
613 iemOp_Grp7_smsw,
614 iemOp_InvalidWithRM,
615 iemOp_Grp7_lmsw,
616 iemOp_Grp7_invlpg
617};
618
619
620/** Opcode 0x0f 0x01. */
621FNIEMOP_DEF(iemOp_Grp7)
622{
623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
624 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
625 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
626
627 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
628 {
629 case 0:
630 switch (bRm & X86_MODRM_RM_MASK)
631 {
632 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
633 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
634 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
635 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
636 }
637 return IEMOP_RAISE_INVALID_OPCODE();
638
639 case 1:
640 switch (bRm & X86_MODRM_RM_MASK)
641 {
642 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
643 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
644 }
645 return IEMOP_RAISE_INVALID_OPCODE();
646
647 case 2:
648 switch (bRm & X86_MODRM_RM_MASK)
649 {
650 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
651 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
652 }
653 return IEMOP_RAISE_INVALID_OPCODE();
654
655 case 3:
656 switch (bRm & X86_MODRM_RM_MASK)
657 {
658 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
659 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
660 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
661 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
662 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
663 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
664 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
665 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
667 }
668
669 case 4:
670 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
671
672 case 5:
673 return IEMOP_RAISE_INVALID_OPCODE();
674
675 case 6:
676 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
677
678 case 7:
679 switch (bRm & X86_MODRM_RM_MASK)
680 {
681 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
682 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
683 }
684 return IEMOP_RAISE_INVALID_OPCODE();
685
686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
687 }
688}
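/* Annotation: Group 7 memory forms dispatch through g_apfnGroup7Mem on the
   reg field alone, while the register forms (mod == 3) must also decode the
   rm field because several instructions share one reg value.  E.g. 0F 01 F8
   gives reg = 7, rm = 0 (swapgs), while 0F 01 F9 gives reg = 7, rm = 1
   (rdtscp). */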
689
690/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
691FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
692{
693 IEMOP_HLP_NO_REAL_OR_V86_MODE();
694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
695
696 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
697 {
698 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
699 switch (pVCpu->iem.s.enmEffOpSize)
700 {
701 case IEMMODE_16BIT:
702 {
703 IEM_MC_BEGIN(3, 0);
704 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
705 IEM_MC_ARG(uint16_t, u16Sel, 1);
706 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
707
708 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
709 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
710 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
711
712 IEM_MC_END();
713 return VINF_SUCCESS;
714 }
715
716 case IEMMODE_32BIT:
717 case IEMMODE_64BIT:
718 {
719 IEM_MC_BEGIN(3, 0);
720 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
721 IEM_MC_ARG(uint16_t, u16Sel, 1);
722 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
723
724 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
725 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
726 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
727
728 IEM_MC_END();
729 return VINF_SUCCESS;
730 }
731
732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
733 }
734 }
735 else
736 {
737 switch (pVCpu->iem.s.enmEffOpSize)
738 {
739 case IEMMODE_16BIT:
740 {
741 IEM_MC_BEGIN(3, 1);
742 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
743 IEM_MC_ARG(uint16_t, u16Sel, 1);
744 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
746
747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
748 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
749
750 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
751 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
752 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
753
754 IEM_MC_END();
755 return VINF_SUCCESS;
756 }
757
758 case IEMMODE_32BIT:
759 case IEMMODE_64BIT:
760 {
761 IEM_MC_BEGIN(3, 1);
762 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
763 IEM_MC_ARG(uint16_t, u16Sel, 1);
764 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
766
767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
768 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
769/** @todo testcase: make sure it's a 16-bit read. */
770
771 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
772 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
773 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
774
775 IEM_MC_END();
776 return VINF_SUCCESS;
777 }
778
779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
780 }
781 }
782}
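/* Annotation: lar (0x0f 0x02) and lsl (0x0f 0x03) share the worker above;
   fIsLar selects whether iemCImpl_LarLsl_uXX returns the access-rights bytes
   or the segment limit, with EFLAGS.ZF signalling whether the selector was
   valid and visible at the current privilege level. */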
783
784
785
786/** Opcode 0x0f 0x02. */
787FNIEMOP_DEF(iemOp_lar_Gv_Ew)
788{
789 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
790 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
791}
792
793
794/** Opcode 0x0f 0x03. */
795FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
796{
797 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
798 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
799}
800
801
802/** Opcode 0x0f 0x05. */
803FNIEMOP_DEF(iemOp_syscall)
804{
805 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
807 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
808}
809
810
811/** Opcode 0x0f 0x06. */
812FNIEMOP_DEF(iemOp_clts)
813{
814 IEMOP_MNEMONIC(clts, "clts");
815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
816 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
817}
818
819
820/** Opcode 0x0f 0x07. */
821FNIEMOP_DEF(iemOp_sysret)
822{
823 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
825 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
826}
827
828
829/** Opcode 0x0f 0x08. */
830FNIEMOP_DEF(iemOp_invd)
831{
832 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
833 IEMOP_HLP_MIN_486();
834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
835 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
836}
837
838
839/** Opcode 0x0f 0x09. */
840FNIEMOP_DEF(iemOp_wbinvd)
841{
842 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
843 IEMOP_HLP_MIN_486();
844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
845 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
846}
847
848
849/** Opcode 0x0f 0x0b. */
850FNIEMOP_DEF(iemOp_ud2)
851{
852 IEMOP_MNEMONIC(ud2, "ud2");
853 return IEMOP_RAISE_INVALID_OPCODE();
854}
855
856/** Opcode 0x0f 0x0d. */
857FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
858{
859 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
860 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
861 {
862 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
863 return IEMOP_RAISE_INVALID_OPCODE();
864 }
865
866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
867 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
868 {
869 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
870 return IEMOP_RAISE_INVALID_OPCODE();
871 }
872
873 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
874 {
875 case 2: /* Aliased to /0 for the time being. */
876 case 4: /* Aliased to /0 for the time being. */
877 case 5: /* Aliased to /0 for the time being. */
878 case 6: /* Aliased to /0 for the time being. */
879 case 7: /* Aliased to /0 for the time being. */
880 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
881 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
882 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
884 }
885
886 IEM_MC_BEGIN(0, 1);
887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
890 /* Currently a NOP. */
891 NOREF(GCPtrEffSrc);
892 IEM_MC_ADVANCE_RIP();
893 IEM_MC_END();
894 return VINF_SUCCESS;
895}
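/* Annotation: 0F 0D is the AMD prefetch group (e.g. 0F 0D /1 is prefetchw).
   The effective address is still decoded above so that any SIB/displacement
   bytes are consumed and the instruction length comes out right, but since a
   prefetch is only a hint, completing it as a NOP is architecturally fine. */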
896
897
898/** Opcode 0x0f 0x0e. */
899FNIEMOP_DEF(iemOp_femms)
900{
901 IEMOP_MNEMONIC(femms, "femms");
902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
903
904 IEM_MC_BEGIN(0,0);
905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
906 IEM_MC_MAYBE_RAISE_FPU_XCPT();
907 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
908 IEM_MC_FPU_FROM_MMX_MODE();
909 IEM_MC_ADVANCE_RIP();
910 IEM_MC_END();
911 return VINF_SUCCESS;
912}
913
914
915/** Opcode 0x0f 0x0f. */
916FNIEMOP_DEF(iemOp_3Dnow)
917{
918 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
919 {
920 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
921 return IEMOP_RAISE_INVALID_OPCODE();
922 }
923
924#ifdef IEM_WITH_3DNOW
925 /* This is pretty sparse, use switch instead of table. */
926 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
927 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
928#else
929 IEMOP_BITCH_ABOUT_STUB();
930 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
931#endif
932}
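/* Annotation: 3DNow! instructions are encoded as 0F 0F [ModRM] [imm8], with
   the trailing imm8 acting as the actual opcode.  The opcode space is sparse,
   hence the switch-based dispatcher rather than a 256-entry table. */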
933
934
935/**
936 * @opcode 0x10
937 * @oppfx none
938 * @opcpuid sse
939 * @opgroup og_sse_simdfp_datamove
940 * @opxcpttype 4UA
941 * @optest op1=1 op2=2 -> op1=2
942 * @optest op1=0 op2=-22 -> op1=-22
943 */
944FNIEMOP_DEF(iemOp_movups_Vps_Wps)
945{
946 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
949 {
950 /*
951 * Register, register.
952 */
953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
954 IEM_MC_BEGIN(0, 0);
955 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
957 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
958 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
959 IEM_MC_ADVANCE_RIP();
960 IEM_MC_END();
961 }
962 else
963 {
964 /*
965 * Memory, register.
966 */
967 IEM_MC_BEGIN(0, 2);
968 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
970
971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
973 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
975
976 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
977 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
978
979 IEM_MC_ADVANCE_RIP();
980 IEM_MC_END();
981 }
982 return VINF_SUCCESS;
983
984}
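/* Annotation: the register indices above are widened by the REX prefix bits:
   uRexReg and uRexB hold REX.R and REX.B pre-shifted left by 3, so OR-ing
   them onto the 3-bit ModRM fields yields XMM0-XMM15 in 64-bit mode. */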
985
986
987/**
988 * @opcode 0x10
989 * @oppfx 0x66
990 * @opcpuid sse2
991 * @opgroup og_sse2_pcksclr_datamove
992 * @opxcpttype 4UA
993 * @optest op1=1 op2=2 -> op1=2
994 * @optest op1=0 op2=-42 -> op1=-42
995 */
996FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
997{
998 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1001 {
1002 /*
1003 * Register, register.
1004 */
1005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1006 IEM_MC_BEGIN(0, 0);
1007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1008 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1009 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1010 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1011 IEM_MC_ADVANCE_RIP();
1012 IEM_MC_END();
1013 }
1014 else
1015 {
1016 /*
1017 * Memory, register.
1018 */
1019 IEM_MC_BEGIN(0, 2);
1020 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1022
1023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1025 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1026 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1027
1028 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1029 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1030
1031 IEM_MC_ADVANCE_RIP();
1032 IEM_MC_END();
1033 }
1034 return VINF_SUCCESS;
1035}
1036
1037
1038/**
1039 * @opcode 0x10
1040 * @oppfx 0xf3
1041 * @opcpuid sse
1042 * @opgroup og_sse_simdfp_datamove
1043 * @opxcpttype 5
1044 * @optest op1=1 op2=2 -> op1=2
1045 * @optest op1=0 op2=-22 -> op1=-22
1046 */
1047FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1048{
1049 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1051 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1052 {
1053 /*
1054 * Register, register.
1055 */
1056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1057 IEM_MC_BEGIN(0, 1);
1058 IEM_MC_LOCAL(uint32_t, uSrc);
1059
1060 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1061 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1062 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1063 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1064
1065 IEM_MC_ADVANCE_RIP();
1066 IEM_MC_END();
1067 }
1068 else
1069 {
1070 /*
1071 * Memory, register.
1072 */
1073 IEM_MC_BEGIN(0, 2);
1074 IEM_MC_LOCAL(uint32_t, uSrc);
1075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1076
1077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1080 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1081
1082 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1083 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1084
1085 IEM_MC_ADVANCE_RIP();
1086 IEM_MC_END();
1087 }
1088 return VINF_SUCCESS;
1089}
1090
1091
1092/**
1093 * @opcode 0x10
1094 * @oppfx 0xf2
1095 * @opcpuid sse2
1096 * @opgroup og_sse2_pcksclr_datamove
1097 * @opxcpttype 5
1098 * @optest op1=1 op2=2 -> op1=2
1099 * @optest op1=0 op2=-42 -> op1=-42
1100 */
1101FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1102{
1103 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1106 {
1107 /*
1108 * Register, register.
1109 */
1110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1111 IEM_MC_BEGIN(0, 1);
1112 IEM_MC_LOCAL(uint64_t, uSrc);
1113
1114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1115 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1116 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1117 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1118
1119 IEM_MC_ADVANCE_RIP();
1120 IEM_MC_END();
1121 }
1122 else
1123 {
1124 /*
1125 * Memory, register.
1126 */
1127 IEM_MC_BEGIN(0, 2);
1128 IEM_MC_LOCAL(uint64_t, uSrc);
1129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1130
1131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1133 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1135
1136 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1137 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1138
1139 IEM_MC_ADVANCE_RIP();
1140 IEM_MC_END();
1141 }
1142 return VINF_SUCCESS;
1143}
1144
1145
1146/**
1147 * @opcode 0x11
1148 * @oppfx none
1149 * @opcpuid sse
1150 * @opgroup og_sse_simdfp_datamove
1151 * @opxcpttype 4UA
1152 * @optest op1=1 op2=2 -> op1=2
1153 * @optest op1=0 op2=-42 -> op1=-42
1154 */
1155FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1156{
1157 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1160 {
1161 /*
1162 * Register, register.
1163 */
1164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1165 IEM_MC_BEGIN(0, 0);
1166 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1168 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1169 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1170 IEM_MC_ADVANCE_RIP();
1171 IEM_MC_END();
1172 }
1173 else
1174 {
1175 /*
1176 * Memory, register.
1177 */
1178 IEM_MC_BEGIN(0, 2);
1179 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1181
1182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1186
1187 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1188 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1189
1190 IEM_MC_ADVANCE_RIP();
1191 IEM_MC_END();
1192 }
1193 return VINF_SUCCESS;
1194}
1195
1196
1197/**
1198 * @opcode 0x11
1199 * @oppfx 0x66
1200 * @opcpuid sse2
1201 * @opgroup og_sse2_pcksclr_datamove
1202 * @opxcpttype 4UA
1203 * @optest op1=1 op2=2 -> op1=2
1204 * @optest op1=0 op2=-42 -> op1=-42
1205 */
1206FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1207{
1208 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1210 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1211 {
1212 /*
1213 * Register, register.
1214 */
1215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1216 IEM_MC_BEGIN(0, 0);
1217 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1218 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1219 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1220 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1221 IEM_MC_ADVANCE_RIP();
1222 IEM_MC_END();
1223 }
1224 else
1225 {
1226 /*
1227 * Memory, register.
1228 */
1229 IEM_MC_BEGIN(0, 2);
1230 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1232
1233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1235 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1236 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1237
1238 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1239 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1240
1241 IEM_MC_ADVANCE_RIP();
1242 IEM_MC_END();
1243 }
1244 return VINF_SUCCESS;
1245}
1246
1247
1248/**
1249 * @opcode 0x11
1250 * @oppfx 0xf3
1251 * @opcpuid sse
1252 * @opgroup og_sse_simdfp_datamove
1253 * @opxcpttype 5
1254 * @optest op1=1 op2=2 -> op1=2
1255 * @optest op1=0 op2=-22 -> op1=-22
1256 */
1257FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1258{
1259 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1262 {
1263 /*
1264 * Register, register.
1265 */
1266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1267 IEM_MC_BEGIN(0, 1);
1268 IEM_MC_LOCAL(uint32_t, uSrc);
1269
1270 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1271 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1272 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1273 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1274
1275 IEM_MC_ADVANCE_RIP();
1276 IEM_MC_END();
1277 }
1278 else
1279 {
1280 /*
1281 * Memory, register.
1282 */
1283 IEM_MC_BEGIN(0, 2);
1284 IEM_MC_LOCAL(uint32_t, uSrc);
1285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1286
1287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1290 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1291
1292 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1293 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1294
1295 IEM_MC_ADVANCE_RIP();
1296 IEM_MC_END();
1297 }
1298 return VINF_SUCCESS;
1299}
1300
1301
1302/**
1303 * @opcode 0x11
1304 * @oppfx 0xf2
1305 * @opcpuid sse2
1306 * @opgroup og_sse2_pcksclr_datamove
1307 * @opxcpttype 5
1308 * @optest op1=1 op2=2 -> op1=2
1309 * @optest op1=0 op2=-42 -> op1=-42
1310 */
1311FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1312{
1313 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1316 {
1317 /*
1318 * Register, register.
1319 */
1320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1321 IEM_MC_BEGIN(0, 1);
1322 IEM_MC_LOCAL(uint64_t, uSrc);
1323
1324 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1325 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1326 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1327 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1328
1329 IEM_MC_ADVANCE_RIP();
1330 IEM_MC_END();
1331 }
1332 else
1333 {
1334 /*
1335 * Memory, register.
1336 */
1337 IEM_MC_BEGIN(0, 2);
1338 IEM_MC_LOCAL(uint64_t, uSrc);
1339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1340
1341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1343 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1344 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1345
1346 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1347 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1348
1349 IEM_MC_ADVANCE_RIP();
1350 IEM_MC_END();
1351 }
1352 return VINF_SUCCESS;
1353}
1354
1355
1356FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1357{
1358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1359 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1360 {
1361 /**
1362 * @opcode 0x12
1363 * @opcodesub 11 mr/reg
1364 * @oppfx none
1365 * @opcpuid sse
1366 * @opgroup og_sse_simdfp_datamove
1367 * @opxcpttype 5
1368 * @optest op1=1 op2=2 -> op1=2
1369 * @optest op1=0 op2=-42 -> op1=-42
1370 */
1371 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1372
1373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1374 IEM_MC_BEGIN(0, 1);
1375 IEM_MC_LOCAL(uint64_t, uSrc);
1376
1377 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1379 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1380 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1381
1382 IEM_MC_ADVANCE_RIP();
1383 IEM_MC_END();
1384 }
1385 else
1386 {
1387 /**
1388 * @opdone
1389 * @opcode 0x12
1390 * @opcodesub !11 mr/reg
1391 * @oppfx none
1392 * @opcpuid sse
1393 * @opgroup og_sse_simdfp_datamove
1394 * @opxcpttype 5
1395 * @optest op1=1 op2=2 -> op1=2
1396 * @optest op1=0 op2=-42 -> op1=-42
1397 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1398 */
1399 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1400
1401 IEM_MC_BEGIN(0, 2);
1402 IEM_MC_LOCAL(uint64_t, uSrc);
1403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1404
1405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1408 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1409
1410 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1411 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1412
1413 IEM_MC_ADVANCE_RIP();
1414 IEM_MC_END();
1415 }
1416 return VINF_SUCCESS;
1417}
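/* Annotation: 0F 12 is one of the opcodes whose mnemonic depends on the mod
   field: the register form above is MOVHLPS (high qword of the source XMM
   register to the low qword of the destination), the memory form is MOVLPS
   (low qword loaded from memory).  The destination's high qword is left
   untouched in both cases. */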
1418
1419
1420/**
1421 * @opcode 0x12
1422 * @opcodesub !11 mr/reg
1423 * @oppfx 0x66
1424 * @opcpuid sse2
1425 * @opgroup og_sse2_pcksclr_datamove
1426 * @opxcpttype 5
1427 * @optest op1=1 op2=2 -> op1=2
1428 * @optest op1=0 op2=-42 -> op1=-42
1429 */
1430FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1431{
1432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1433 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1434 {
1435 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1436
1437 IEM_MC_BEGIN(0, 2);
1438 IEM_MC_LOCAL(uint64_t, uSrc);
1439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1440
1441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1443 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1444 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1445
1446 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1447 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1448
1449 IEM_MC_ADVANCE_RIP();
1450 IEM_MC_END();
1451 return VINF_SUCCESS;
1452 }
1453
1454 /**
1455 * @opdone
1456 * @opmnemonic ud660f12m3
1457 * @opcode 0x12
1458 * @opcodesub 11 mr/reg
1459 * @oppfx 0x66
1460 * @opunused immediate
1461 * @opcpuid sse
1462 * @optest ->
1463 */
1464 return IEMOP_RAISE_INVALID_OPCODE();
1465}
1466
1467
1468/**
1469 * @opcode 0x12
1470 * @oppfx 0xf3
1471 * @opcpuid sse3
1472 * @opgroup og_sse3_pcksclr_datamove
1473 * @opxcpttype 4
1474 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1475 * op1=0x00000002000000020000000100000001
1476 */
1477FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1478{
1479 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1482 {
1483 /*
1484 * Register, register.
1485 */
1486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1487 IEM_MC_BEGIN(2, 0);
1488 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1489 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1490
1491 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1492 IEM_MC_PREPARE_SSE_USAGE();
1493
1494 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1495 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1496 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1497
1498 IEM_MC_ADVANCE_RIP();
1499 IEM_MC_END();
1500 }
1501 else
1502 {
1503 /*
1504 * Register, memory.
1505 */
1506 IEM_MC_BEGIN(2, 2);
1507 IEM_MC_LOCAL(RTUINT128U, uSrc);
1508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1509 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1510 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1511
1512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1514 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1515 IEM_MC_PREPARE_SSE_USAGE();
1516
1517 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1518 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1519 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1520
1521 IEM_MC_ADVANCE_RIP();
1522 IEM_MC_END();
1523 }
1524 return VINF_SUCCESS;
1525}
1526
1527
1528/**
1529 * @opcode 0x12
1530 * @oppfx 0xf2
1531 * @opcpuid sse3
1532 * @opgroup og_sse3_pcksclr_datamove
1533 * @opxcpttype 5
1534 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1535 * op1=0x22222222111111112222222211111111
1536 */
1537FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1538{
1539 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1542 {
1543 /*
1544 * Register, register.
1545 */
1546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1547 IEM_MC_BEGIN(2, 0);
1548 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1549 IEM_MC_ARG(uint64_t, uSrc, 1);
1550
1551 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1552 IEM_MC_PREPARE_SSE_USAGE();
1553
1554 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1555 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1556 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1557
1558 IEM_MC_ADVANCE_RIP();
1559 IEM_MC_END();
1560 }
1561 else
1562 {
1563 /*
1564 * Register, memory.
1565 */
1566 IEM_MC_BEGIN(2, 2);
1567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1568 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1569 IEM_MC_ARG(uint64_t, uSrc, 1);
1570
1571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1573 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1574 IEM_MC_PREPARE_SSE_USAGE();
1575
1576 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1577 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1578 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1579
1580 IEM_MC_ADVANCE_RIP();
1581 IEM_MC_END();
1582 }
1583 return VINF_SUCCESS;
1584}
1585
1586
1587/**
1588 * @opcode 0x13
1589 * @opcodesub !11 mr/reg
1590 * @oppfx none
1591 * @opcpuid sse
1592 * @opgroup og_sse_simdfp_datamove
1593 * @opxcpttype 5
1594 * @optest op1=1 op2=2 -> op1=2
1595 * @optest op1=0 op2=-42 -> op1=-42
1596 */
1597FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1598{
1599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1600 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1601 {
1602 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1603
1604 IEM_MC_BEGIN(0, 2);
1605 IEM_MC_LOCAL(uint64_t, uSrc);
1606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1607
1608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1610 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1611 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1612
1613 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1614 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1615
1616 IEM_MC_ADVANCE_RIP();
1617 IEM_MC_END();
1618 return VINF_SUCCESS;
1619 }
1620
1621 /**
1622 * @opdone
1623 * @opmnemonic ud0f13m3
1624 * @opcode 0x13
1625 * @opcodesub 11 mr/reg
1626 * @oppfx none
1627 * @opunused immediate
1628 * @opcpuid sse
1629 * @optest ->
1630 */
1631 return IEMOP_RAISE_INVALID_OPCODE();
1632}
1633
1634
1635/**
1636 * @opcode 0x13
1637 * @opcodesub !11 mr/reg
1638 * @oppfx 0x66
1639 * @opcpuid sse2
1640 * @opgroup og_sse2_pcksclr_datamove
1641 * @opxcpttype 5
1642 * @optest op1=1 op2=2 -> op1=2
1643 * @optest op1=0 op2=-42 -> op1=-42
1644 */
1645FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1646{
1647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1648 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1649 {
1650 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1651 IEM_MC_BEGIN(0, 2);
1652 IEM_MC_LOCAL(uint64_t, uSrc);
1653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1654
1655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1659
1660 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1661 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1662
1663 IEM_MC_ADVANCE_RIP();
1664 IEM_MC_END();
1665 return VINF_SUCCESS;
1666 }
1667
1668 /**
1669 * @opdone
1670 * @opmnemonic ud660f13m3
1671 * @opcode 0x13
1672 * @opcodesub 11 mr/reg
1673 * @oppfx 0x66
1674 * @opunused immediate
1675 * @opcpuid sse
1676 * @optest ->
1677 */
1678 return IEMOP_RAISE_INVALID_OPCODE();
1679}
1680
1681
1682/**
1683 * @opmnemonic udf30f13
1684 * @opcode 0x13
1685 * @oppfx 0xf3
1686 * @opunused intel-modrm
1687 * @opcpuid sse
1688 * @optest ->
1689 * @opdone
1690 */
1691
1692/**
1693 * @opmnemonic udf20f13
1694 * @opcode 0x13
1695 * @oppfx 0xf2
1696 * @opunused intel-modrm
1697 * @opcpuid sse
1698 * @optest ->
1699 * @opdone
1700 */
1701
1702/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1703FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1704/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1705FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1706
1707/**
1708 * @opdone
1709 * @opmnemonic udf30f14
1710 * @opcode 0x14
1711 * @oppfx 0xf3
1712 * @opunused intel-modrm
1713 * @opcpuid sse
1714 * @optest ->
1715 * @opdone
1716 */
1717
1718/**
1719 * @opmnemonic udf20f14
1720 * @opcode 0x14
1721 * @oppfx 0xf2
1722 * @opunused intel-modrm
1723 * @opcpuid sse
1724 * @optest ->
1725 * @opdone
1726 */
1727
1728/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1729FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1730/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1731FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1732/* Opcode 0xf3 0x0f 0x15 - invalid */
1733/* Opcode 0xf2 0x0f 0x15 - invalid */
1734
1735/**
1736 * @opdone
1737 * @opmnemonic udf30f15
1738 * @opcode 0x15
1739 * @oppfx 0xf3
1740 * @opunused intel-modrm
1741 * @opcpuid sse
1742 * @optest ->
1743 * @opdone
1744 */
1745
1746/**
1747 * @opmnemonic udf20f15
1748 * @opcode 0x15
1749 * @oppfx 0xf2
1750 * @opunused intel-modrm
1751 * @opcpuid sse
1752 * @optest ->
1753 * @opdone
1754 */
1755
1756FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1757{
1758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1759 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1760 {
1761 /**
1762 * @opcode 0x16
1763 * @opcodesub 11 mr/reg
1764 * @oppfx none
1765 * @opcpuid sse
1766 * @opgroup og_sse_simdfp_datamove
1767 * @opxcpttype 5
1768 * @optest op1=1 op2=2 -> op1=2
1769 * @optest op1=0 op2=-42 -> op1=-42
1770 */
1771 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1772
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEM_MC_BEGIN(0, 1);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776
1777 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1779 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1780 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1781
1782 IEM_MC_ADVANCE_RIP();
1783 IEM_MC_END();
1784 }
1785 else
1786 {
1787 /**
1788 * @opdone
1789 * @opcode 0x16
1790 * @opcodesub !11 mr/reg
1791 * @oppfx none
1792 * @opcpuid sse
1793 * @opgroup og_sse_simdfp_datamove
1794 * @opxcpttype 5
1795 * @optest op1=1 op2=2 -> op1=2
1796 * @optest op1=0 op2=-42 -> op1=-42
1797 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1798 */
1799 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1800
1801 IEM_MC_BEGIN(0, 2);
1802 IEM_MC_LOCAL(uint64_t, uSrc);
1803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1804
1805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1808 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1809
1810 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1811 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1812
1813 IEM_MC_ADVANCE_RIP();
1814 IEM_MC_END();
1815 }
1816 return VINF_SUCCESS;
1817}
1818
1819
1820/**
1821 * @opcode 0x16
1822 * @opcodesub !11 mr/reg
1823 * @oppfx 0x66
1824 * @opcpuid sse2
1825 * @opgroup og_sse2_pcksclr_datamove
1826 * @opxcpttype 5
1827 * @optest op1=1 op2=2 -> op1=2
1828 * @optest op1=0 op2=-42 -> op1=-42
1829 */
1830FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1831{
1832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1833 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1834 {
1835 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1836 IEM_MC_BEGIN(0, 2);
1837 IEM_MC_LOCAL(uint64_t, uSrc);
1838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1839
1840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1842 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1843 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1844
1845 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1846 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1847
1848 IEM_MC_ADVANCE_RIP();
1849 IEM_MC_END();
1850 return VINF_SUCCESS;
1851 }
1852
1853 /**
1854 * @opdone
1855 * @opmnemonic ud660f16m3
1856 * @opcode 0x16
1857 * @opcodesub 11 mr/reg
1858 * @oppfx 0x66
1859 * @opunused immediate
1860 * @opcpuid sse
1861 * @optest ->
1862 */
1863 return IEMOP_RAISE_INVALID_OPCODE();
1864}
1865
1866
1867/**
1868 * @opcode 0x16
1869 * @oppfx 0xf3
1870 * @opcpuid sse3
1871 * @opgroup og_sse3_pcksclr_datamove
1872 * @opxcpttype 4
1873 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1874 * op1=0x00000002000000020000000100000001
1875 */
1876FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1877{
1878 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1880 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1881 {
1882 /*
1883 * Register, register.
1884 */
1885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1886 IEM_MC_BEGIN(2, 0);
1887 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1888 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1889
1890 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1891 IEM_MC_PREPARE_SSE_USAGE();
1892
1893 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1894 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1895 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1896
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(2, 2);
1906 IEM_MC_LOCAL(RTUINT128U, uSrc);
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1909 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1913 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1914 IEM_MC_PREPARE_SSE_USAGE();
1915
1916 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1917 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1918 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
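
/*
 * Semantics sketch for movshdup, matching the @optest values above: the
 * odd-indexed dwords of the source are duplicated into both halves of each
 * dword pair.  Hypothetical helper, not the actual iemAImpl_movshdup:
 *
 *      static void sketchMovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
 *      {
 *          au32Dst[0] = au32Src[1];
 *          au32Dst[1] = au32Src[1];
 *          au32Dst[2] = au32Src[3];
 *          au32Dst[3] = au32Src[3];
 *      }
 */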
1925
1926/**
1927 * @opdone
1928 * @opmnemonic udf30f16
1929 * @opcode 0x16
1930 * @oppfx 0xf2
1931 * @opunused intel-modrm
1932 * @opcpuid sse
1933 * @optest ->
1934 * @opdone
1935 */
1936
1937
1938/**
1939 * @opcode 0x17
1940 * @opcodesub !11 mr/reg
1941 * @oppfx none
1942 * @opcpuid sse
1943 * @opgroup og_sse_simdfp_datamove
1944 * @opxcpttype 5
1945 * @optest op1=1 op2=2 -> op1=2
1946 * @optest op1=0 op2=-42 -> op1=-42
1947 */
1948FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1949{
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1952 {
1953 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1954
1955 IEM_MC_BEGIN(0, 2);
1956 IEM_MC_LOCAL(uint64_t, uSrc);
1957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1958
1959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1961 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1962 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1963
1964 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1965 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1966
1967 IEM_MC_ADVANCE_RIP();
1968 IEM_MC_END();
1969 return VINF_SUCCESS;
1970 }
1971
1972 /**
1973 * @opdone
1974 * @opmnemonic ud0f17m3
1975 * @opcode 0x17
1976 * @opcodesub 11 mr/reg
1977 * @oppfx none
1978 * @opunused immediate
1979 * @opcpuid sse
1980 * @optest ->
1981 */
1982 return IEMOP_RAISE_INVALID_OPCODE();
1983}
1984
1985
1986/**
1987 * @opcode 0x17
1988 * @opcodesub !11 mr/reg
1989 * @oppfx 0x66
1990 * @opcpuid sse2
1991 * @opgroup og_sse2_pcksclr_datamove
1992 * @opxcpttype 5
1993 * @optest op1=1 op2=2 -> op1=2
1994 * @optest op1=0 op2=-42 -> op1=-42
1995 */
1996FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1997{
1998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1999 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2000 {
2001 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2002
2003 IEM_MC_BEGIN(0, 2);
2004 IEM_MC_LOCAL(uint64_t, uSrc);
2005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2006
2007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2011
2012 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2013 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2014
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 return VINF_SUCCESS;
2018 }
2019
2020 /**
2021 * @opdone
2022 * @opmnemonic ud660f17m3
2023 * @opcode 0x17
2024 * @opcodesub 11 mr/reg
2025 * @oppfx 0x66
2026 * @opunused immediate
2027 * @opcpuid sse
2028 * @optest ->
2029 */
2030 return IEMOP_RAISE_INVALID_OPCODE();
2031}
2032
2033
2034/**
2035 * @opdone
2036 * @opmnemonic udf30f17
2037 * @opcode 0x17
2038 * @oppfx 0xf3
2039 * @opunused intel-modrm
2040 * @opcpuid sse
2041 * @optest ->
2042 * @opdone
2043 */
2044
2045/**
2046 * @opmnemonic udf20f17
2047 * @opcode 0x17
2048 * @oppfx 0xf2
2049 * @opunused intel-modrm
2050 * @opcpuid sse
2051 * @optest ->
2052 * @opdone
2053 */
2054
2055
2056/** Opcode 0x0f 0x18. */
2057FNIEMOP_DEF(iemOp_prefetch_Grp16)
2058{
2059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2060 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2061 {
2062 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2063 {
2064 case 4: /* Aliased to /0 for the time being according to AMD. */
2065 case 5: /* Aliased to /0 for the time being according to AMD. */
2066 case 6: /* Aliased to /0 for the time being according to AMD. */
2067 case 7: /* Aliased to /0 for the time being according to AMD. */
2068 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2069 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2070 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2071 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2073 }
2074
2075 IEM_MC_BEGIN(0, 1);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2079 /* Currently a NOP. */
2080 NOREF(GCPtrEffSrc);
2081 IEM_MC_ADVANCE_RIP();
2082 IEM_MC_END();
2083 return VINF_SUCCESS;
2084 }
2085
2086 return IEMOP_RAISE_INVALID_OPCODE();
2087}
2088
2089
2090/** Opcode 0x0f 0x19..0x1f. */
2091FNIEMOP_DEF(iemOp_nop_Ev)
2092{
2093 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2095 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2096 {
2097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2098 IEM_MC_BEGIN(0, 0);
2099 IEM_MC_ADVANCE_RIP();
2100 IEM_MC_END();
2101 }
2102 else
2103 {
2104 IEM_MC_BEGIN(0, 1);
2105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2108 /* Currently a NOP. */
2109 NOREF(GCPtrEffSrc);
2110 IEM_MC_ADVANCE_RIP();
2111 IEM_MC_END();
2112 }
2113 return VINF_SUCCESS;
2114}
2115
2116
2117/** Opcode 0x0f 0x20. */
2118FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2119{
2120 /* mod is ignored, as are operand-size overrides. */
2121 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2122 IEMOP_HLP_MIN_386();
2123 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2124 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2125 else
2126 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2127
2128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2129 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2130 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2131 {
2132 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2133 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2134 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2135 iCrReg |= 8;
2136 }
2137 switch (iCrReg)
2138 {
2139 case 0: case 2: case 3: case 4: case 8:
2140 break;
2141 default:
2142 return IEMOP_RAISE_INVALID_OPCODE();
2143 }
2144 IEMOP_HLP_DONE_DECODING();
2145
2146 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2147}
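
/*
 * Encoding note for the LOCK handling above: on CPUs reporting
 * fMovCr8In32Bit (AMD's alternative CR8 encoding), a LOCK prefix on a
 * control register move selects CR8 instead of raising #UD, which is what
 * the iCrReg |= 8 above implements.  Byte-level illustration only:
 *
 *      static uint8_t const s_abMovEaxCr8[] = { 0xf0, 0x0f, 0x20, 0xc0 };
 *      // f0       - lock prefix, reinterpreted as the CR8 selector
 *      // 0f 20 c0 - mov eax, cr0; with the prefix it decodes as mov eax, cr8
 */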
2148
2149
2150/** Opcode 0x0f 0x21. */
2151FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2152{
2153 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2154 IEMOP_HLP_MIN_386();
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2157 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2158 return IEMOP_RAISE_INVALID_OPCODE();
2159 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2160 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2161 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2162}
2163
2164
2165/** Opcode 0x0f 0x22. */
2166FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2167{
2168 /* mod is ignored, as are operand-size overrides. */
2169 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2170 IEMOP_HLP_MIN_386();
2171 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2172 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2173 else
2174 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2175
2176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2177 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2178 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2179 {
2180 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2181 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2182 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2183 iCrReg |= 8;
2184 }
2185 switch (iCrReg)
2186 {
2187 case 0: case 2: case 3: case 4: case 8:
2188 break;
2189 default:
2190 return IEMOP_RAISE_INVALID_OPCODE();
2191 }
2192 IEMOP_HLP_DONE_DECODING();
2193
2194 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2195}
2196
2197
2198/** Opcode 0x0f 0x23. */
2199FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2200{
2201 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2202 IEMOP_HLP_MIN_386();
2203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2205 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2206 return IEMOP_RAISE_INVALID_OPCODE();
2207 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2208 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2209 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2210}
2211
2212
2213/** Opcode 0x0f 0x24. */
2214FNIEMOP_DEF(iemOp_mov_Rd_Td)
2215{
2216 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2217 /** @todo works on 386 and 486. */
2218 /* The RM byte is not considered, see testcase. */
2219 return IEMOP_RAISE_INVALID_OPCODE();
2220}
2221
2222
2223/** Opcode 0x0f 0x26. */
2224FNIEMOP_DEF(iemOp_mov_Td_Rd)
2225{
2226 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2227 /** @todo works on 386 and 486. */
2228 /* The RM byte is not considered, see testcase. */
2229 return IEMOP_RAISE_INVALID_OPCODE();
2230}
2231
2232
2233/**
2234 * @opcode 0x28
2235 * @oppfx none
2236 * @opcpuid sse
2237 * @opgroup og_sse_simdfp_datamove
2238 * @opxcpttype 1
2239 * @optest op1=1 op2=2 -> op1=2
2240 * @optest op1=0 op2=-42 -> op1=-42
2241 */
2242FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2243{
2244 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2247 {
2248 /*
2249 * Register, register.
2250 */
2251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2252 IEM_MC_BEGIN(0, 0);
2253 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2255 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2256 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2257 IEM_MC_ADVANCE_RIP();
2258 IEM_MC_END();
2259 }
2260 else
2261 {
2262 /*
2263 * Register, memory.
2264 */
2265 IEM_MC_BEGIN(0, 2);
2266 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2271 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2273
2274 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2275 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP();
2278 IEM_MC_END();
2279 }
2280 return VINF_SUCCESS;
2281}
2282
2283/**
2284 * @opcode 0x28
2285 * @oppfx 66
2286 * @opcpuid sse2
2287 * @opgroup og_sse2_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 */
2292FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2293{
2294 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2296 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2297 {
2298 /*
2299 * Register, register.
2300 */
2301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2302 IEM_MC_BEGIN(0, 0);
2303 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2305 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2306 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2307 IEM_MC_ADVANCE_RIP();
2308 IEM_MC_END();
2309 }
2310 else
2311 {
2312 /*
2313 * Register, memory.
2314 */
2315 IEM_MC_BEGIN(0, 2);
2316 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2318
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2322 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2323
2324 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2325 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2326
2327 IEM_MC_ADVANCE_RIP();
2328 IEM_MC_END();
2329 }
2330 return VINF_SUCCESS;
2331}
2332
2333/* Opcode 0xf3 0x0f 0x28 - invalid */
2334/* Opcode 0xf2 0x0f 0x28 - invalid */
2335
2336/**
2337 * @opcode 0x29
2338 * @oppfx none
2339 * @opcpuid sse
2340 * @opgroup og_sse_simdfp_datamove
2341 * @opxcpttype 1
2342 * @optest op1=1 op2=2 -> op1=2
2343 * @optest op1=0 op2=-42 -> op1=-42
2344 */
2345FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2346{
2347 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2349 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2350 {
2351 /*
2352 * Register, register.
2353 */
2354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2355 IEM_MC_BEGIN(0, 0);
2356 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2357 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2358 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2359 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2360 IEM_MC_ADVANCE_RIP();
2361 IEM_MC_END();
2362 }
2363 else
2364 {
2365 /*
2366 * Memory, register.
2367 */
2368 IEM_MC_BEGIN(0, 2);
2369 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2376
2377 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2378 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2379
2380 IEM_MC_ADVANCE_RIP();
2381 IEM_MC_END();
2382 }
2383 return VINF_SUCCESS;
2384}
2385
2386/**
2387 * @opcode 0x29
2388 * @oppfx 66
2389 * @opcpuid sse2
2390 * @opgroup og_sse2_pcksclr_datamove
2391 * @opxcpttype 1
2392 * @optest op1=1 op2=2 -> op1=2
2393 * @optest op1=0 op2=-42 -> op1=-42
2394 */
2395FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2396{
2397 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2400 {
2401 /*
2402 * Register, register.
2403 */
2404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2405 IEM_MC_BEGIN(0, 0);
2406 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2407 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2408 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2409 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2410 IEM_MC_ADVANCE_RIP();
2411 IEM_MC_END();
2412 }
2413 else
2414 {
2415 /*
2416 * Memory, register.
2417 */
2418 IEM_MC_BEGIN(0, 2);
2419 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2421
2422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2424 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2425 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2426
2427 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2428 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2429
2430 IEM_MC_ADVANCE_RIP();
2431 IEM_MC_END();
2432 }
2433 return VINF_SUCCESS;
2434}
2435
2436/* Opcode 0xf3 0x0f 0x29 - invalid */
2437/* Opcode 0xf2 0x0f 0x29 - invalid */
2438
2439
2440/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2441FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2442/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2443FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2444/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2445FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2446/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2447FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2448
2449
2450/**
2451 * @opcode 0x2b
2452 * @opcodesub !11 mr/reg
2453 * @oppfx none
2454 * @opcpuid sse
2455 * @opgroup og_sse1_cachect
2456 * @opxcpttype 1
2457 * @optest op1=1 op2=2 -> op1=2
2458 * @optest op1=0 op2=-42 -> op1=-42
2459 */
2460FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2461{
2462 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2464 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2465 {
2466 /*
2467 * memory, register.
2468 */
2469 IEM_MC_BEGIN(0, 2);
2470 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2472
2473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2477
2478 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2479 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2480
2481 IEM_MC_ADVANCE_RIP();
2482 IEM_MC_END();
2483 }
2484 /* The register, register encoding is invalid. */
2485 else
2486 return IEMOP_RAISE_INVALID_OPCODE();
2487 return VINF_SUCCESS;
2488}
2489
2490/**
2491 * @opcode 0x2b
2492 * @opcodesub !11 mr/reg
2493 * @oppfx 0x66
2494 * @opcpuid sse2
2495 * @opgroup og_sse2_cachect
2496 * @opxcpttype 1
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2501{
2502 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2504 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2505 {
2506 /*
2507 * memory, register.
2508 */
2509 IEM_MC_BEGIN(0, 2);
2510 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2517
2518 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2519 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2520
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 /* The register, register encoding is invalid. */
2525 else
2526 return IEMOP_RAISE_INVALID_OPCODE();
2527 return VINF_SUCCESS;
2528}
2529/* Opcode 0xf3 0x0f 0x2b - invalid */
2530/* Opcode 0xf2 0x0f 0x2b - invalid */
2531
2532
2533/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2534FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2535/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2536FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2537/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2538FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2539/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2540FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2541
2542/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2543FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2544/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2545FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2546/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2547FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2548/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2549FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2550
2551/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2552FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2553/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2554FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2555/* Opcode 0xf3 0x0f 0x2e - invalid */
2556/* Opcode 0xf2 0x0f 0x2e - invalid */
2557
2558/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2559FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2560/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2561FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2562/* Opcode 0xf3 0x0f 0x2f - invalid */
2563/* Opcode 0xf2 0x0f 0x2f - invalid */
2564
2565/** Opcode 0x0f 0x30. */
2566FNIEMOP_DEF(iemOp_wrmsr)
2567{
2568 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2570 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2571}
2572
2573
2574/** Opcode 0x0f 0x31. */
2575FNIEMOP_DEF(iemOp_rdtsc)
2576{
2577 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2579 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2580}
2581
2582
2583/** Opcode 0x0f 0x32. */
2584FNIEMOP_DEF(iemOp_rdmsr)
2585{
2586 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2588 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2589}
2590
2591
2592/** Opcode 0x0f 0x33. */
2593FNIEMOP_DEF(iemOp_rdpmc)
2594{
2595 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2597 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2598}
2599
2600
2601/** Opcode 0x0f 0x34. */
2602FNIEMOP_STUB(iemOp_sysenter);
2603/** Opcode 0x0f 0x35. */
2604FNIEMOP_STUB(iemOp_sysexit);
2605/** Opcode 0x0f 0x37. */
2606FNIEMOP_STUB(iemOp_getsec);
2607
2608
2609/** Opcode 0x0f 0x38. */
2610FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2611{
2612#ifdef IEM_WITH_THREE_0F_38
2613 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2614 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2615#else
2616 IEMOP_BITCH_ABOUT_STUB();
2617 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2618#endif
2619}
2620
2621
2622/** Opcode 0x0f 0x3a. */
2623FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2624{
2625#ifdef IEM_WITH_THREE_0F_3A
2626 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2627 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2628#else
2629 IEMOP_BITCH_ABOUT_STUB();
2630 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2631#endif
2632}
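
/*
 * Both escape handlers above index their tables with four entries per
 * opcode byte, one per mandatory-prefix group in idxPrefix order (this
 * sketch assumes the usual IEM ordering: none, 0x66, 0xf3, 0xf2):
 *
 *      static size_t sketchThreeByteIndex(uint8_t b, unsigned idxPrefix)
 *      {
 *          return (size_t)b * 4 + idxPrefix;   // [b*4+0]=none .. [b*4+3]=0xf2
 *      }
 */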
2633
2634
2635/**
2636 * Implements a conditional move.
2637 *
2638 * Wish there was an obvious way to do this where we could share code and
2639 * reduce bloat.
2640 *
2641 * @param a_Cnd The conditional "microcode" operation.
2642 */
2643#define CMOV_X(a_Cnd) \
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2645 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2646 { \
2647 switch (pVCpu->iem.s.enmEffOpSize) \
2648 { \
2649 case IEMMODE_16BIT: \
2650 IEM_MC_BEGIN(0, 1); \
2651 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2652 a_Cnd { \
2653 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2655 } IEM_MC_ENDIF(); \
2656 IEM_MC_ADVANCE_RIP(); \
2657 IEM_MC_END(); \
2658 return VINF_SUCCESS; \
2659 \
2660 case IEMMODE_32BIT: \
2661 IEM_MC_BEGIN(0, 1); \
2662 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2663 a_Cnd { \
2664 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2665 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2666 } IEM_MC_ELSE() { \
2667 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2668 } IEM_MC_ENDIF(); \
2669 IEM_MC_ADVANCE_RIP(); \
2670 IEM_MC_END(); \
2671 return VINF_SUCCESS; \
2672 \
2673 case IEMMODE_64BIT: \
2674 IEM_MC_BEGIN(0, 1); \
2675 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2676 a_Cnd { \
2677 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2678 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2679 } IEM_MC_ENDIF(); \
2680 IEM_MC_ADVANCE_RIP(); \
2681 IEM_MC_END(); \
2682 return VINF_SUCCESS; \
2683 \
2684 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2685 } \
2686 } \
2687 else \
2688 { \
2689 switch (pVCpu->iem.s.enmEffOpSize) \
2690 { \
2691 case IEMMODE_16BIT: \
2692 IEM_MC_BEGIN(0, 2); \
2693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2694 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2696 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2697 a_Cnd { \
2698 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2699 } IEM_MC_ENDIF(); \
2700 IEM_MC_ADVANCE_RIP(); \
2701 IEM_MC_END(); \
2702 return VINF_SUCCESS; \
2703 \
2704 case IEMMODE_32BIT: \
2705 IEM_MC_BEGIN(0, 2); \
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2707 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2709 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2710 a_Cnd { \
2711 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2712 } IEM_MC_ELSE() { \
2713 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2714 } IEM_MC_ENDIF(); \
2715 IEM_MC_ADVANCE_RIP(); \
2716 IEM_MC_END(); \
2717 return VINF_SUCCESS; \
2718 \
2719 case IEMMODE_64BIT: \
2720 IEM_MC_BEGIN(0, 2); \
2721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2722 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2724 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2725 a_Cnd { \
2726 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2727 } IEM_MC_ENDIF(); \
2728 IEM_MC_ADVANCE_RIP(); \
2729 IEM_MC_END(); \
2730 return VINF_SUCCESS; \
2731 \
2732 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2733 } \
2734 } do {} while (0)
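
/*
 * Behavioural sketch of the 32-bit case of CMOV_X as plain C (hypothetical
 * helper, not generated code).  Two details the macro encodes: the memory
 * operand is always fetched whether or not the condition holds, and a
 * 32-bit CMOVcc always zeroes bits 63:32 of the destination even when the
 * move does not happen (see the IEM_MC_ELSE branches above):
 *
 *      static uint64_t sketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
 *      {
 *          if (fCond)
 *              return uSrc;        // zero extended into bits 63:32
 *          return (uint32_t)uDst;  // high dword cleared regardless
 *      }
 */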
2735
2736
2737
2738/** Opcode 0x0f 0x40. */
2739FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2740{
2741 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2742 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2743}
2744
2745
2746/** Opcode 0x0f 0x41. */
2747FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2748{
2749 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2750 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2751}
2752
2753
2754/** Opcode 0x0f 0x42. */
2755FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2756{
2757 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2758 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2759}
2760
2761
2762/** Opcode 0x0f 0x43. */
2763FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2764{
2765 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2766 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2767}
2768
2769
2770/** Opcode 0x0f 0x44. */
2771FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2772{
2773 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2774 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2775}
2776
2777
2778/** Opcode 0x0f 0x45. */
2779FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2780{
2781 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2782 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2783}
2784
2785
2786/** Opcode 0x0f 0x46. */
2787FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2788{
2789 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2790 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2791}
2792
2793
2794/** Opcode 0x0f 0x47. */
2795FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2796{
2797 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2798 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2799}
2800
2801
2802/** Opcode 0x0f 0x48. */
2803FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2804{
2805 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2806 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2807}
2808
2809
2810/** Opcode 0x0f 0x49. */
2811FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2812{
2813 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2814 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2815}
2816
2817
2818/** Opcode 0x0f 0x4a. */
2819FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2820{
2821 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2822 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2823}
2824
2825
2826/** Opcode 0x0f 0x4b. */
2827FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2828{
2829 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2830 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2831}
2832
2833
2834/** Opcode 0x0f 0x4c. */
2835FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2836{
2837 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2838 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2839}
2840
2841
2842/** Opcode 0x0f 0x4d. */
2843FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2844{
2845 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2846 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2847}
2848
2849
2850/** Opcode 0x0f 0x4e. */
2851FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2852{
2853 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2854 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2855}
2856
2857
2858/** Opcode 0x0f 0x4f. */
2859FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2860{
2861 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2862 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2863}
2864
2865#undef CMOV_X
2866
2867/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2868FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2869/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2870FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2871/* Opcode 0xf3 0x0f 0x50 - invalid */
2872/* Opcode 0xf2 0x0f 0x50 - invalid */
2873
2874/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2875FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2876/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2877FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2878/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2879FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2880/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2881FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2882
2883/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2884FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2885/* Opcode 0x66 0x0f 0x52 - invalid */
2886/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2887FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2888/* Opcode 0xf2 0x0f 0x52 - invalid */
2889
2890/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2891FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2892/* Opcode 0x66 0x0f 0x53 - invalid */
2893/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2894FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2895/* Opcode 0xf2 0x0f 0x53 - invalid */
2896
2897/** Opcode 0x0f 0x54 - andps Vps, Wps */
2898FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2899/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2900FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2901/* Opcode 0xf3 0x0f 0x54 - invalid */
2902/* Opcode 0xf2 0x0f 0x54 - invalid */
2903
2904/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2905FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2906/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2907FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2908/* Opcode 0xf3 0x0f 0x55 - invalid */
2909/* Opcode 0xf2 0x0f 0x55 - invalid */
2910
2911/** Opcode 0x0f 0x56 - orps Vps, Wps */
2912FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2913/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2914FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2915/* Opcode 0xf3 0x0f 0x56 - invalid */
2916/* Opcode 0xf2 0x0f 0x56 - invalid */
2917
2918/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2919FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2920/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2921FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2922/* Opcode 0xf3 0x0f 0x57 - invalid */
2923/* Opcode 0xf2 0x0f 0x57 - invalid */
2924
2925/** Opcode 0x0f 0x58 - addps Vps, Wps */
2926FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2927/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2928FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2929/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2930FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2931/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2932FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2933
2934/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2935FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2936/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2937FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2938/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2939FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2940/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2941FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2942
2943/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2944FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2945/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2946FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2947/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2948FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2949/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2950FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2951
2952/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2953FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2954/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2955FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2956/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2957FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2958/* Opcode 0xf2 0x0f 0x5b - invalid */
2959
2960/** Opcode 0x0f 0x5c - subps Vps, Wps */
2961FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2962/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2963FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2964/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2965FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2966/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2967FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2968
2969/** Opcode 0x0f 0x5d - minps Vps, Wps */
2970FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2971/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2972FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2973/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2974FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2975/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2976FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2977
2978/** Opcode 0x0f 0x5e - divps Vps, Wps */
2979FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2980/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2981FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2982/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2983FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2984/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2985FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2986
2987/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2988FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2989/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2990FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2991/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2992FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2993/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2994FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2995
2996/**
2997 * Common worker for SSE2 instructions on the form:
2998 *      pxxxx xmm1, xmm2/mem128
2999 *
3000 * The 2nd operand is the first half of a register, which in the memory case
3001 * means a 128-bit aligned memory access of which only the lower 64 bits are
3002 * used.
3003 *
3004 * Exceptions type 4.
3005 */
3006FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3007{
3008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3009 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3010 {
3011 /*
3012 * Register, register.
3013 */
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_BEGIN(2, 0);
3016 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3017 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3018 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3019 IEM_MC_PREPARE_SSE_USAGE();
3020 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3021 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3022 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3023 IEM_MC_ADVANCE_RIP();
3024 IEM_MC_END();
3025 }
3026 else
3027 {
3028 /*
3029 * Register, memory.
3030 */
3031 IEM_MC_BEGIN(2, 2);
3032 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3033 IEM_MC_LOCAL(uint64_t, uSrc);
3034 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3036
3037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3039 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3040 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3041
3042 IEM_MC_PREPARE_SSE_USAGE();
3043 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3044 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3045
3046 IEM_MC_ADVANCE_RIP();
3047 IEM_MC_END();
3048 }
3049 return VINF_SUCCESS;
3050}
3051
3052
3053/**
3054 * Common worker for MMX instructions on the form:
3055 *      pxxxx mm1, mm2/mem32
3056 *
3057 * The 2nd operand is the first half of a register, which in the memory case
3058 * means a 32-bit memory access.
3059 *
3060 * Exceptions type 4.
3061 */
3062FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if (!pImpl->pfnU64)
3067 return IEMOP_RAISE_INVALID_OPCODE();
3068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3069 {
3070 /*
3071 * Register, register.
3072 */
3073 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3074 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_BEGIN(2, 0);
3077 IEM_MC_ARG(uint64_t *, pDst, 0);
3078 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3079 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3080 IEM_MC_PREPARE_FPU_USAGE();
3081 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3082 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3083 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3084 IEM_MC_ADVANCE_RIP();
3085 IEM_MC_END();
3086 }
3087 else
3088 {
3089 /*
3090 * Register, memory.
3091 */
3092 IEM_MC_BEGIN(2, 2);
3093 IEM_MC_ARG(uint64_t *, pDst, 0);
3094 IEM_MC_LOCAL(uint32_t, uSrc);
3095 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3097
3098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3101 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3102
3103 IEM_MC_PREPARE_FPU_USAGE();
3104 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3105 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3106
3107 IEM_MC_ADVANCE_RIP();
3108 IEM_MC_END();
3109 }
3110 return VINF_SUCCESS;
3111}
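
/*
 * Operation sketch for the two LowLow workers above, using punpcklbw on the
 * 64-bit (MMX) operands as the example: the low halves of destination and
 * source are interleaved into the full destination.  Hypothetical helper:
 *
 *      static void sketchPunpcklbwU64(uint8_t abDst[8], uint8_t const abSrc[8])
 *      {
 *          uint8_t abRes[8];
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              abRes[i * 2]     = abDst[i];    // byte from the low half of dst
 *              abRes[i * 2 + 1] = abSrc[i];    // byte from the low half of src
 *          }
 *          for (unsigned i = 0; i < 8; i++)
 *              abDst[i] = abRes[i];
 *      }
 */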
3112
3113
3114/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3115FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3116{
3117 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3118 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3119}
3120
3121/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3122FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3123{
3124 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3125 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3126}
3127
3128/* Opcode 0xf3 0x0f 0x60 - invalid */
3129
3130
3131/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3132FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3133{
3134 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX CPUID. */
3135 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3136}
3137
3138/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3139FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3140{
3141 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3142 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3143}
3144
3145/* Opcode 0xf3 0x0f 0x61 - invalid */
3146
3147
3148/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3149FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3150{
3151 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3152 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3153}
3154
3155/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3156FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3157{
3158 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3159 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3160}
3161
3162/* Opcode 0xf3 0x0f 0x62 - invalid */
3163
3164
3165
3166/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3167FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3168/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3169FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3170/* Opcode 0xf3 0x0f 0x63 - invalid */
3171
3172/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3173FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3174/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3175FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3176/* Opcode 0xf3 0x0f 0x64 - invalid */
3177
3178/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3179FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3180/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3181FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3182/* Opcode 0xf3 0x0f 0x65 - invalid */
3183
3184/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3185FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3186/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3187FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3188/* Opcode 0xf3 0x0f 0x66 - invalid */
3189
3190/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3191FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3192/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3193FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3194/* Opcode 0xf3 0x0f 0x67 - invalid */
3195
3196
3197/**
3198 * Common worker for MMX instructions on the form:
3199 * pxxxx mm1, mm2/mem64
3200 *
3201 * The 2nd operand is the second half of a register, which in the memory case
3202 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3203 * where it may read the full 128 bits or only the upper 64 bits.
3204 *
3205 * Exceptions type 4.
3206 */
3207FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3208{
3209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3210 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3212 {
3213 /*
3214 * Register, register.
3215 */
3216 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3217 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219 IEM_MC_BEGIN(2, 0);
3220 IEM_MC_ARG(uint64_t *, pDst, 0);
3221 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3222 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3223 IEM_MC_PREPARE_FPU_USAGE();
3224 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3225 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3226 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3227 IEM_MC_ADVANCE_RIP();
3228 IEM_MC_END();
3229 }
3230 else
3231 {
3232 /*
3233 * Register, memory.
3234 */
3235 IEM_MC_BEGIN(2, 2);
3236 IEM_MC_ARG(uint64_t *, pDst, 0);
3237 IEM_MC_LOCAL(uint64_t, uSrc);
3238 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3240
3241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3243 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3244 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3245
3246 IEM_MC_PREPARE_FPU_USAGE();
3247 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3248 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
3255
3256
3257/**
3258 * Common worker for SSE2 instructions on the form:
3259 * pxxxx xmm1, xmm2/mem128
3260 *
3261 * The 2nd operand is the second half of a register, which in the memory case
3262 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3263 * where it may read the full 128 bits or only the upper 64 bits.
3264 *
3265 * Exceptions type 4.
3266 */
3267FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3268{
3269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3270 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3271 {
3272 /*
3273 * Register, register.
3274 */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3278 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3280 IEM_MC_PREPARE_SSE_USAGE();
3281 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3282 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3283 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3294 IEM_MC_LOCAL(RTUINT128U, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3302
3303 IEM_MC_PREPARE_SSE_USAGE();
3304 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3305 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
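
/*
 * Matching sketch for the HighHigh workers above, again with punpckhbw on
 * 64-bit (MMX) operands: the upper halves are interleaved.  Hypothetical:
 *
 *      static void sketchPunpckhbwU64(uint8_t abDst[8], uint8_t const abSrc[8])
 *      {
 *          uint8_t abRes[8];
 *          for (unsigned i = 0; i < 4; i++)
 *          {
 *              abRes[i * 2]     = abDst[i + 4];    // high half of dst
 *              abRes[i * 2 + 1] = abSrc[i + 4];    // high half of src
 *          }
 *          for (unsigned i = 0; i < 8; i++)
 *              abDst[i] = abRes[i];
 *      }
 */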
3312
3313
3314/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3315FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3316{
3317 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3318 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3319}
3320
3321/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3322FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3323{
3324 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3325 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3326}
3327/* Opcode 0xf3 0x0f 0x68 - invalid */
3328
3329
3330/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3331FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3332{
3333 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3334 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3335}
3336
3337/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3338FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3339{
3340 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3341 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3342
3343}
3344/* Opcode 0xf3 0x0f 0x69 - invalid */
3345
3346
3347/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3348FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3349{
3350 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3351 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3352}
3353
3354/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3355FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3356{
3357 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3358 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3359}
3360/* Opcode 0xf3 0x0f 0x6a - invalid */
3361
3362
3363/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3364FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3365/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3366FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3367/* Opcode 0xf3 0x0f 0x6b - invalid */
3368
3369
3370/* Opcode 0x0f 0x6c - invalid */
3371
3372/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3373FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3374{
3375 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3376 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3377}
3378
3379/* Opcode 0xf3 0x0f 0x6c - invalid */
3380/* Opcode 0xf2 0x0f 0x6c - invalid */
3381
3382
3383/* Opcode 0x0f 0x6d - invalid */
3384
3385/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3386FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3387{
3388 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3389 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3390}
3391
3392/* Opcode 0xf3 0x0f 0x6d - invalid */
3393
3394
3395FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3396{
3397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3398 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3399 {
3400 /**
3401 * @opcode 0x6e
3402 * @opcodesub rex.w=1
3403 * @oppfx none
3404 * @opcpuid mmx
3405 * @opgroup og_mmx_datamove
3406 * @opxcpttype 5
3407 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3408 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3409 */
3410 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3412 {
3413 /* MMX, greg64 */
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3415 IEM_MC_BEGIN(0, 1);
3416 IEM_MC_LOCAL(uint64_t, u64Tmp);
3417
3418 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3419 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3420
3421 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3422 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3423 IEM_MC_FPU_TO_MMX_MODE();
3424
3425 IEM_MC_ADVANCE_RIP();
3426 IEM_MC_END();
3427 }
3428 else
3429 {
3430 /* MMX, [mem64] */
3431 IEM_MC_BEGIN(0, 2);
3432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3433 IEM_MC_LOCAL(uint64_t, u64Tmp);
3434
3435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3437 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3438 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3439
3440 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3441 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3442 IEM_MC_FPU_TO_MMX_MODE();
3443
3444 IEM_MC_ADVANCE_RIP();
3445 IEM_MC_END();
3446 }
3447 }
3448 else
3449 {
3450 /**
3451 * @opdone
3452 * @opcode 0x6e
3453 * @opcodesub rex.w=0
3454 * @oppfx none
3455 * @opcpuid mmx
3456 * @opgroup og_mmx_datamove
3457 * @opxcpttype 5
3458 * @opfunction iemOp_movd_q_Pd_Ey
3459 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3460 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3461 */
3462 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3464 {
3465 /* MMX, greg */
3466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3467 IEM_MC_BEGIN(0, 1);
3468 IEM_MC_LOCAL(uint64_t, u64Tmp);
3469
3470 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3471 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3472
3473 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3474 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3475 IEM_MC_FPU_TO_MMX_MODE();
3476
3477 IEM_MC_ADVANCE_RIP();
3478 IEM_MC_END();
3479 }
3480 else
3481 {
3482 /* MMX, [mem] */
3483 IEM_MC_BEGIN(0, 2);
3484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3485 IEM_MC_LOCAL(uint32_t, u32Tmp);
3486
3487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3489 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3490 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3491
3492 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3493 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3494 IEM_MC_FPU_TO_MMX_MODE();
3495
3496 IEM_MC_ADVANCE_RIP();
3497 IEM_MC_END();
3498 }
3499 }
3500 return VINF_SUCCESS;
3501}
3502
3503FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3504{
3505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3506 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3507 {
3508 /**
3509 * @opcode 0x6e
3510 * @opcodesub rex.w=1
3511 * @oppfx 0x66
3512 * @opcpuid sse2
3513 * @opgroup og_sse2_simdint_datamove
3514 * @opxcpttype 5
3515 * @optest 64-bit / op1=1 op2=2 -> op1=2
3516 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3517 */
3518 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3520 {
3521 /* XMM, greg64 */
3522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3523 IEM_MC_BEGIN(0, 1);
3524 IEM_MC_LOCAL(uint64_t, u64Tmp);
3525
3526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3528
3529 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3530 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3531
3532 IEM_MC_ADVANCE_RIP();
3533 IEM_MC_END();
3534 }
3535 else
3536 {
3537 /* XMM, [mem64] */
3538 IEM_MC_BEGIN(0, 2);
3539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3540 IEM_MC_LOCAL(uint64_t, u64Tmp);
3541
3542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3544 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3546
3547 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3548 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3549
3550 IEM_MC_ADVANCE_RIP();
3551 IEM_MC_END();
3552 }
3553 }
3554 else
3555 {
3556 /**
3557 * @opdone
3558 * @opcode 0x6e
3559 * @opcodesub rex.w=0
3560 * @oppfx 0x66
3561 * @opcpuid sse2
3562 * @opgroup og_sse2_simdint_datamove
3563 * @opxcpttype 5
3564 * @opfunction iemOp_movd_q_Vy_Ey
3565 * @optest op1=1 op2=2 -> op1=2
3566 * @optest op1=0 op2=-42 -> op1=-42
3567 */
3568 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3569 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3570 {
3571 /* XMM, greg32 */
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573 IEM_MC_BEGIN(0, 1);
3574 IEM_MC_LOCAL(uint32_t, u32Tmp);
3575
3576 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3578
3579 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3580 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3581
3582 IEM_MC_ADVANCE_RIP();
3583 IEM_MC_END();
3584 }
3585 else
3586 {
3587 /* XMM, [mem32] */
3588 IEM_MC_BEGIN(0, 2);
3589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3590 IEM_MC_LOCAL(uint32_t, u32Tmp);
3591
3592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3594 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3596
3597 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3598 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3599
3600 IEM_MC_ADVANCE_RIP();
3601 IEM_MC_END();
3602 }
3603 }
3604 return VINF_SUCCESS;
3605}
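
/*
 * Zero-extension sketch for the XMM forms above: both MOVD and MOVQ into an
 * XMM register clear every bit the source does not cover, as the _ZX_U128
 * micro-ops indicate.  Hypothetical model with two 64-bit halves:
 *
 *      static void sketchMovdToXmm(uint64_t au64Xmm[2], uint32_t uSrc)
 *      {
 *          au64Xmm[0] = uSrc;  // dword zero extended into the low qword
 *          au64Xmm[1] = 0;     // high qword cleared as well
 *      }
 */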
3606
3607/* Opcode 0xf3 0x0f 0x6e - invalid */
3608
3609
3610/**
3611 * @opcode 0x6f
3612 * @oppfx none
3613 * @opcpuid mmx
3614 * @opgroup og_mmx_datamove
3615 * @opxcpttype 5
3616 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3617 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3618 */
3619FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3620{
3621 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3624 {
3625 /*
3626 * Register, register.
3627 */
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_BEGIN(0, 1);
3630 IEM_MC_LOCAL(uint64_t, u64Tmp);
3631
3632 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3633 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3634
3635 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3636 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3637 IEM_MC_FPU_TO_MMX_MODE();
3638
3639 IEM_MC_ADVANCE_RIP();
3640 IEM_MC_END();
3641 }
3642 else
3643 {
3644 /*
3645 * Register, memory.
3646 */
3647 IEM_MC_BEGIN(0, 2);
3648 IEM_MC_LOCAL(uint64_t, u64Tmp);
3649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3650
3651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3653 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3654 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3655
3656 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3657 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3658 IEM_MC_FPU_TO_MMX_MODE();
3659
3660 IEM_MC_ADVANCE_RIP();
3661 IEM_MC_END();
3662 }
3663 return VINF_SUCCESS;
3664}
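
/*
 * The IEM_MC_FPU_TO_MMX_MODE() calls above model the architectural side
 * effect behind the ftw=0xff expectations in the @optest lines: an MMX
 * instruction tags all eight x87 registers valid and clears TOP.  Rough
 * sketch (hypothetical helper, abridged tag word):
 *
 *      static void sketchFpuToMmxMode(uint16_t *puFtw, uint16_t *puFsw)
 *      {
 *          *puFtw  = 0xff;                  // all eight registers valid
 *          *puFsw &= ~(uint16_t)(7 << 11);  // FSW.TOP = 0
 *      }
 */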
3665
3666/**
3667 * @opcode 0x6f
3668 * @oppfx 0x66
3669 * @opcpuid sse2
3670 * @opgroup og_sse2_simdint_datamove
3671 * @opxcpttype 1
3672 * @optest op1=1 op2=2 -> op1=2
3673 * @optest op1=0 op2=-42 -> op1=-42
3674 */
3675FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3676{
3677 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3680 {
3681 /*
3682 * Register, register.
3683 */
3684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3685 IEM_MC_BEGIN(0, 0);
3686
3687 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3688 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3689
3690 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3691 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3692 IEM_MC_ADVANCE_RIP();
3693 IEM_MC_END();
3694 }
3695 else
3696 {
3697 /*
3698 * Register, memory.
3699 */
3700 IEM_MC_BEGIN(0, 2);
3701 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3703
3704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3706 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3707 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3708
3709 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3710 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3711
3712 IEM_MC_ADVANCE_RIP();
3713 IEM_MC_END();
3714 }
3715 return VINF_SUCCESS;
3716}
3717
3718/**
3719 * @opcode 0x6f
3720 * @oppfx 0xf3
3721 * @opcpuid sse2
3722 * @opgroup og_sse2_simdint_datamove
3723 * @opxcpttype 4UA
3724 * @optest op1=1 op2=2 -> op1=2
3725 * @optest op1=0 op2=-42 -> op1=-42
3726 */
3727FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3728{
3729 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3732 {
3733 /*
3734 * Register, register.
3735 */
3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3737 IEM_MC_BEGIN(0, 0);
3738 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3739 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3740 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3741 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3742 IEM_MC_ADVANCE_RIP();
3743 IEM_MC_END();
3744 }
3745 else
3746 {
3747 /*
3748 * Register, memory.
3749 */
3750 IEM_MC_BEGIN(0, 2);
3751 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3753
3754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3757 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3758 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3759 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3760
3761 IEM_MC_ADVANCE_RIP();
3762 IEM_MC_END();
3763 }
3764 return VINF_SUCCESS;
3765}
3766
3767
3768/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3769FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3770{
3771 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3774 {
3775 /*
3776 * Register, register.
3777 */
3778 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780
3781 IEM_MC_BEGIN(3, 0);
3782 IEM_MC_ARG(uint64_t *, pDst, 0);
3783 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3784 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3785 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3786 IEM_MC_PREPARE_FPU_USAGE();
3787 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3788 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3789 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3790 IEM_MC_ADVANCE_RIP();
3791 IEM_MC_END();
3792 }
3793 else
3794 {
3795 /*
3796 * Register, memory.
3797 */
3798 IEM_MC_BEGIN(3, 2);
3799 IEM_MC_ARG(uint64_t *, pDst, 0);
3800 IEM_MC_LOCAL(uint64_t, uSrc);
3801 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3803
3804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3805 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3806 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3808 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3809
3810 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3811 IEM_MC_PREPARE_FPU_USAGE();
3812 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3813 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3814
3815 IEM_MC_ADVANCE_RIP();
3816 IEM_MC_END();
3817 }
3818 return VINF_SUCCESS;
3819}
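/* Note: in the memory forms here and in the pshufd/pshufhw/pshuflw variants
   below, the imm8 is fetched only after IEM_MC_CALC_RM_EFF_ADDR, because the
   immediate byte follows any SIB and displacement bytes in the opcode stream. */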
3820
3821/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3822FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3823{
3824 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3827 {
3828 /*
3829 * Register, register.
3830 */
3831 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3833
3834 IEM_MC_BEGIN(3, 0);
3835 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3836 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3837 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3838 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3839 IEM_MC_PREPARE_SSE_USAGE();
3840 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3841 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3843 IEM_MC_ADVANCE_RIP();
3844 IEM_MC_END();
3845 }
3846 else
3847 {
3848 /*
3849 * Register, memory.
3850 */
3851 IEM_MC_BEGIN(3, 2);
3852 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3853 IEM_MC_LOCAL(RTUINT128U, uSrc);
3854 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3856
3857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3858 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3859 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3862
3863 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3864 IEM_MC_PREPARE_SSE_USAGE();
3865 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3866 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3867
3868 IEM_MC_ADVANCE_RIP();
3869 IEM_MC_END();
3870 }
3871 return VINF_SUCCESS;
3872}
3873
3874/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3875FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3876{
3877 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3880 {
3881 /*
3882 * Register, register.
3883 */
3884 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886
3887 IEM_MC_BEGIN(3, 0);
3888 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3889 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3890 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3891 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3892 IEM_MC_PREPARE_SSE_USAGE();
3893 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3894 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 else
3900 {
3901 /*
3902 * Register, memory.
3903 */
3904 IEM_MC_BEGIN(3, 2);
3905 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3906 IEM_MC_LOCAL(RTUINT128U, uSrc);
3907 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3909
3910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3911 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3912 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3915
3916 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3917 IEM_MC_PREPARE_SSE_USAGE();
3918 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3919 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3920
3921 IEM_MC_ADVANCE_RIP();
3922 IEM_MC_END();
3923 }
3924 return VINF_SUCCESS;
3925}
3926
3927/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3928FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3929{
3930 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3932 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3933 {
3934 /*
3935 * Register, register.
3936 */
3937 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3939
3940 IEM_MC_BEGIN(3, 0);
3941 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3942 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3943 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3944 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3945 IEM_MC_PREPARE_SSE_USAGE();
3946 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3947 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3949 IEM_MC_ADVANCE_RIP();
3950 IEM_MC_END();
3951 }
3952 else
3953 {
3954 /*
3955 * Register, memory.
3956 */
3957 IEM_MC_BEGIN(3, 2);
3958 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3959 IEM_MC_LOCAL(RTUINT128U, uSrc);
3960 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3962
3963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3964 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3965 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3967 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3968
3969 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3970 IEM_MC_PREPARE_SSE_USAGE();
3971 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3972 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3973
3974 IEM_MC_ADVANCE_RIP();
3975 IEM_MC_END();
3976 }
3977 return VINF_SUCCESS;
3978}
3979
3980
3981/** Opcode 0x0f 0x71 11/2. */
3982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3983
3984/** Opcode 0x66 0x0f 0x71 11/2. */
3985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3986
3987/** Opcode 0x0f 0x71 11/4. */
3988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3989
3990/** Opcode 0x66 0x0f 0x71 11/4. */
3991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3992
3993/** Opcode 0x0f 0x71 11/6. */
3994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3995
3996/** Opcode 0x66 0x0f 0x71 11/6. */
3997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3998
3999
4000/**
4001 * Group 12 jump table for register variant.
4002 */
4003IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4004{
4005 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4006 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4007 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4008 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4009 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4010 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4011 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4012 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4013};
4014AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
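/* The group 12..14 tables are indexed by ModRM.reg * 4 + pVCpu->iem.s.idxPrefix,
   i.e. one column per mandatory prefix: 0 = none (MMX, Nq), 1 = 0x66 (SSE, Ux),
   2 = 0xf3, 3 = 0xf2. Worked example: 66 0F 71 /2 ib (psrlw xmm1, imm8) with
   bRm = 0xd1 gives reg = (0xd1 >> 3) & 7 = 2, so the flat index is 2*4 + 1 = 9,
   which selects iemOp_Grp12_psrlw_Ux_Ib. */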
4015
4016
4017/** Opcode 0x0f 0x71. */
4018FNIEMOP_DEF(iemOp_Grp12)
4019{
4020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4021 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4022 /* register, register */
4023 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4024 + pVCpu->iem.s.idxPrefix], bRm);
4025 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4026}
4027
4028
4029/** Opcode 0x0f 0x72 11/2. */
4030FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4031
4032/** Opcode 0x66 0x0f 0x72 11/2. */
4033FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4034
4035/** Opcode 0x0f 0x72 11/4. */
4036FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4037
4038/** Opcode 0x66 0x0f 0x72 11/4. */
4039FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4040
4041/** Opcode 0x0f 0x72 11/6. */
4042FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4043
4044/** Opcode 0x66 0x0f 0x72 11/6. */
4045FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4046
4047
4048/**
4049 * Group 13 jump table for register variant.
4050 */
4051IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4052{
4053 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4054 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4055 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4056 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4057 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4058 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4059 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4060 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4061};
4062AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4063
4064/** Opcode 0x0f 0x72. */
4065FNIEMOP_DEF(iemOp_Grp13)
4066{
4067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4069 /* register, register */
4070 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4071 + pVCpu->iem.s.idxPrefix], bRm);
4072 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4073}
4074
4075
4076/** Opcode 0x0f 0x73 11/2. */
4077FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4078
4079/** Opcode 0x66 0x0f 0x73 11/2. */
4080FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4081
4082/** Opcode 0x66 0x0f 0x73 11/3. */
4083FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4084
4085/** Opcode 0x0f 0x73 11/6. */
4086FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4087
4088/** Opcode 0x66 0x0f 0x73 11/6. */
4089FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4090
4091/** Opcode 0x66 0x0f 0x73 11/7. */
4092FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4093
4094/**
4095 * Group 14 jump table for register variant.
4096 */
4097IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4098{
4099 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4100 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4101 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4102 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4103 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4104 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4105 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4106 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4107};
4108AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
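/* Note that /3 (psrldq) and /7 (pslldq) only exist in the 0x66 column: the
   byte-wise 128-bit shifts are SSE2 XMM instructions with no MMX counterpart. */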
4109
4110
4111/** Opcode 0x0f 0x73. */
4112FNIEMOP_DEF(iemOp_Grp14)
4113{
4114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4115 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4116 /* register, register */
4117 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4118 + pVCpu->iem.s.idxPrefix], bRm);
4119 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4120}
4121
4122
4123/**
4124 * Common worker for MMX instructions of the form:
4125 * pxxx mm1, mm2/mem64
4126 */
4127FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4128{
4129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4130 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4131 {
4132 /*
4133 * Register, register.
4134 */
4135 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4136 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_BEGIN(2, 0);
4139 IEM_MC_ARG(uint64_t *, pDst, 0);
4140 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4141 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4142 IEM_MC_PREPARE_FPU_USAGE();
4143 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4144 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4145 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4146 IEM_MC_ADVANCE_RIP();
4147 IEM_MC_END();
4148 }
4149 else
4150 {
4151 /*
4152 * Register, memory.
4153 */
4154 IEM_MC_BEGIN(2, 2);
4155 IEM_MC_ARG(uint64_t *, pDst, 0);
4156 IEM_MC_LOCAL(uint64_t, uSrc);
4157 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4159
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4163 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4164
4165 IEM_MC_PREPARE_FPU_USAGE();
4166 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4167 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4168
4169 IEM_MC_ADVANCE_RIP();
4170 IEM_MC_END();
4171 }
4172 return VINF_SUCCESS;
4173}
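/* Usage sketch: the pcmpeqX handlers further down simply forward to this
   worker with the matching implementation table, e.g.
       return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
   and the worker then invokes pImpl->pfnU64 on the two 64-bit operands. */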
4174
4175
4176/**
4177 * Common worker for SSE2 instructions of the form:
4178 * pxxx xmm1, xmm2/mem128
4179 *
4180 * Proper alignment of the 128-bit operand is enforced.
4181 * Exceptions type 4. SSE2 cpuid checks.
4182 */
4183FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4184{
4185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4187 {
4188 /*
4189 * Register, register.
4190 */
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192 IEM_MC_BEGIN(2, 0);
4193 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4194 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4196 IEM_MC_PREPARE_SSE_USAGE();
4197 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4198 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4199 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4200 IEM_MC_ADVANCE_RIP();
4201 IEM_MC_END();
4202 }
4203 else
4204 {
4205 /*
4206 * Register, memory.
4207 */
4208 IEM_MC_BEGIN(2, 2);
4209 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4210 IEM_MC_LOCAL(RTUINT128U, uSrc);
4211 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4213
4214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4216 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4217 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4218
4219 IEM_MC_PREPARE_SSE_USAGE();
4220 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4221 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4222
4223 IEM_MC_ADVANCE_RIP();
4224 IEM_MC_END();
4225 }
4226 return VINF_SUCCESS;
4227}
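/* Unlike the MMX worker above, the memory path uses IEM_MC_FETCH_MEM_U128_ALIGN_SSE,
   so a misaligned 128-bit operand raises #GP(0) as required for exception type 4. */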
4228
4229
4230/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4231FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4232{
4233 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4234 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4235}
4236
4237/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4238FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4239{
4240 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4241 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4242}
4243
4244/* Opcode 0xf3 0x0f 0x74 - invalid */
4245/* Opcode 0xf2 0x0f 0x74 - invalid */
4246
4247
4248/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4249FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4250{
4251 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4252 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4253}
4254
4255/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4256FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4257{
4258 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4259 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4260}
4261
4262/* Opcode 0xf3 0x0f 0x75 - invalid */
4263/* Opcode 0xf2 0x0f 0x75 - invalid */
4264
4265
4266/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4267FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4268{
4269 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4270 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4271}
4272
4273/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4274FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4275{
4276 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4277 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4278}
4279
4280/* Opcode 0xf3 0x0f 0x76 - invalid */
4281/* Opcode 0xf2 0x0f 0x76 - invalid */
4282
4283
4284/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4285FNIEMOP_DEF(iemOp_emms)
4286{
4287 IEMOP_MNEMONIC(emms, "emms");
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4289
4290 IEM_MC_BEGIN(0,0);
4291 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4292 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4293 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4294 IEM_MC_FPU_FROM_MMX_MODE();
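 /* This is EMMS's architectural effect: all x87 tag-word entries are marked empty again. */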
4295 IEM_MC_ADVANCE_RIP();
4296 IEM_MC_END();
4297 return VINF_SUCCESS;
4298}
4299
4300/* Opcode 0x66 0x0f 0x77 - invalid */
4301/* Opcode 0xf3 0x0f 0x77 - invalid */
4302/* Opcode 0xf2 0x0f 0x77 - invalid */
4303
4304/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4305#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4306FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4307{
4308 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4309 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4310 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
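 /* VMREAD/VMWRITE operand size is fixed by CPU mode: 64-bit in long mode, 32-bit
    otherwise; operand-size and repeat prefixes are rejected while decoding below. */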
4311 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4312
4313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4314 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4315 {
4316 /*
4317 * Register, register.
4318 */
4319 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4320 if (enmEffOpSize == IEMMODE_64BIT)
4321 {
4322 IEM_MC_BEGIN(2, 0);
4323 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4324 IEM_MC_ARG(uint64_t, u64Enc, 1);
4325 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); /* field encoding in ModRM.reg */
4326 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); /* destination in ModRM.rm */
4327 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread64_reg, pu64Dst, u64Enc);
4328 IEM_MC_END();
4329 }
4330 else
4331 {
4332 IEM_MC_BEGIN(2, 0);
4333 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4334 IEM_MC_ARG(uint32_t, u32Enc, 1);
4335 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); /* field encoding in ModRM.reg */
4336 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); /* destination in ModRM.rm */
4337 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread32_reg, pu32Dst, u32Enc);
4338 IEM_MC_END();
4339 }
4340 }
4341 else
4342 {
4343 /*
4344 * Register, memory.
4345 */
4346 if (enmEffOpSize == IEMMODE_64BIT)
4347 {
4348 IEM_MC_BEGIN(4, 0);
4349 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4350 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4351 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4352 IEM_MC_ARG(uint64_t, u64Enc, 3);
4353 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4355 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4356 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4357 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4358 IEM_MC_END();
4359 }
4360 else
4361 {
4362 IEM_MC_BEGIN(4, 0);
4363 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4364 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4365 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4366 IEM_MC_ARG(uint32_t, u32Enc, 3);
4367 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4369 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4370 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4371 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4372 IEM_MC_END();
4373 }
4374 }
4375 return VINF_SUCCESS;
4376}
4377#else
4378FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4379#endif
4380
4381/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4382FNIEMOP_STUB(iemOp_AmdGrp17);
4383/* Opcode 0xf3 0x0f 0x78 - invalid */
4384/* Opcode 0xf2 0x0f 0x78 - invalid */
4385
4386/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4387#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4388FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4389{
4390 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4391 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4392 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4393 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4394
4395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4397 {
4398 /*
4399 * Register, register.
4400 */
4401 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4402 if (enmEffOpSize == IEMMODE_64BIT)
4403 {
4404 IEM_MC_BEGIN(2, 0);
4405 IEM_MC_ARG(uint64_t, u64Val, 0);
4406 IEM_MC_ARG(uint64_t, u64Enc, 1);
4407 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4408 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4409 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4410 IEM_MC_END();
4411 }
4412 else
4413 {
4414 IEM_MC_BEGIN(2, 0);
4415 IEM_MC_ARG(uint32_t, u32Val, 0);
4416 IEM_MC_ARG(uint32_t, u32Enc, 1);
4417 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4418 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4419 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4420 IEM_MC_END();
4421 }
4422 }
4423 else
4424 {
4425 /*
4426 * Register, memory.
4427 */
4428 if (enmEffOpSize == IEMMODE_64BIT)
4429 {
4430 IEM_MC_BEGIN(4, 0);
4431 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4432 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4433 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4434 IEM_MC_ARG(uint64_t, u64Enc, 3);
4435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4436 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4437 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4438 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4439 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4440 IEM_MC_END();
4441 }
4442 else
4443 {
4444 IEM_MC_BEGIN(4, 0);
4445 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4446 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4447 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4448 IEM_MC_ARG(uint32_t, u32Enc, 3);
4449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4450 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4451 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4452 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4453 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4454 IEM_MC_END();
4455 }
4456 }
4457 return VINF_SUCCESS;
4458}
4459#else
4460FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4461#endif
4462/* Opcode 0x66 0x0f 0x79 - invalid */
4463/* Opcode 0xf3 0x0f 0x79 - invalid */
4464/* Opcode 0xf2 0x0f 0x79 - invalid */
4465
4466/* Opcode 0x0f 0x7a - invalid */
4467/* Opcode 0x66 0x0f 0x7a - invalid */
4468/* Opcode 0xf3 0x0f 0x7a - invalid */
4469/* Opcode 0xf2 0x0f 0x7a - invalid */
4470
4471/* Opcode 0x0f 0x7b - invalid */
4472/* Opcode 0x66 0x0f 0x7b - invalid */
4473/* Opcode 0xf3 0x0f 0x7b - invalid */
4474/* Opcode 0xf2 0x0f 0x7b - invalid */
4475
4476/* Opcode 0x0f 0x7c - invalid */
4477/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4478FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4479/* Opcode 0xf3 0x0f 0x7c - invalid */
4480/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4481FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4482
4483/* Opcode 0x0f 0x7d - invalid */
4484/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4485FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4486/* Opcode 0xf3 0x0f 0x7d - invalid */
4487/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4488FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4489
4490
4491/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4492FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4493{
4494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4495 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4496 {
4497 /**
4498 * @opcode 0x7e
4499 * @opcodesub rex.w=1
4500 * @oppfx none
4501 * @opcpuid mmx
4502 * @opgroup og_mmx_datamove
4503 * @opxcpttype 5
4504 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4505 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4506 */
4507 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4509 {
4510 /* greg64, MMX */
4511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4512 IEM_MC_BEGIN(0, 1);
4513 IEM_MC_LOCAL(uint64_t, u64Tmp);
4514
4515 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4516 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4517
4518 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4519 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4520 IEM_MC_FPU_TO_MMX_MODE();
4521
4522 IEM_MC_ADVANCE_RIP();
4523 IEM_MC_END();
4524 }
4525 else
4526 {
4527 /* [mem64], MMX */
4528 IEM_MC_BEGIN(0, 2);
4529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4530 IEM_MC_LOCAL(uint64_t, u64Tmp);
4531
4532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4534 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4535 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4536
4537 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4538 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4539 IEM_MC_FPU_TO_MMX_MODE();
4540
4541 IEM_MC_ADVANCE_RIP();
4542 IEM_MC_END();
4543 }
4544 }
4545 else
4546 {
4547 /**
4548 * @opdone
4549 * @opcode 0x7e
4550 * @opcodesub rex.w=0
4551 * @oppfx none
4552 * @opcpuid mmx
4553 * @opgroup og_mmx_datamove
4554 * @opxcpttype 5
4555 * @opfunction iemOp_movd_q_Ey_Pd
4556 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4557 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4558 */
4559 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4561 {
4562 /* greg32, MMX */
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(uint32_t, u32Tmp);
4566
4567 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4568 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4569
4570 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4571 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4572 IEM_MC_FPU_TO_MMX_MODE();
4573
4574 IEM_MC_ADVANCE_RIP();
4575 IEM_MC_END();
4576 }
4577 else
4578 {
4579 /* [mem32], MMX */
4580 IEM_MC_BEGIN(0, 2);
4581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4582 IEM_MC_LOCAL(uint32_t, u32Tmp);
4583
4584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4587 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4588
4589 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4590 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4591 IEM_MC_FPU_TO_MMX_MODE();
4592
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 }
4597 return VINF_SUCCESS;
4599}
4600
4601
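/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */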
4602FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4603{
4604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4606 {
4607 /**
4608 * @opcode 0x7e
4609 * @opcodesub rex.w=1
4610 * @oppfx 0x66
4611 * @opcpuid sse2
4612 * @opgroup og_sse2_simdint_datamove
4613 * @opxcpttype 5
4614 * @optest 64-bit / op1=1 op2=2 -> op1=2
4615 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4616 */
4617 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4618 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4619 {
4620 /* greg64, XMM */
4621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4622 IEM_MC_BEGIN(0, 1);
4623 IEM_MC_LOCAL(uint64_t, u64Tmp);
4624
4625 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4626 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4627
4628 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4629 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4630
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /* [mem64], XMM */
4637 IEM_MC_BEGIN(0, 2);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4639 IEM_MC_LOCAL(uint64_t, u64Tmp);
4640
4641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4643 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4644 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4645
4646 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4647 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4648
4649 IEM_MC_ADVANCE_RIP();
4650 IEM_MC_END();
4651 }
4652 }
4653 else
4654 {
4655 /**
4656 * @opdone
4657 * @opcode 0x7e
4658 * @opcodesub rex.w=0
4659 * @oppfx 0x66
4660 * @opcpuid sse2
4661 * @opgroup og_sse2_simdint_datamove
4662 * @opxcpttype 5
4663 * @opfunction iemOp_movd_q_Ey_Vy
4664 * @optest op1=1 op2=2 -> op1=2
4665 * @optest op1=0 op2=-42 -> op1=-42
4666 */
4667 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4669 {
4670 /* greg32, XMM */
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_BEGIN(0, 1);
4673 IEM_MC_LOCAL(uint32_t, u32Tmp);
4674
4675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4677
4678 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4679 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4680
4681 IEM_MC_ADVANCE_RIP();
4682 IEM_MC_END();
4683 }
4684 else
4685 {
4686 /* [mem32], XMM */
4687 IEM_MC_BEGIN(0, 2);
4688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4689 IEM_MC_LOCAL(uint32_t, u32Tmp);
4690
4691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4694 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4695
4696 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4697 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4698
4699 IEM_MC_ADVANCE_RIP();
4700 IEM_MC_END();
4701 }
4702 }
4703 return VINF_SUCCESS;
4705}
4706
4707/**
4708 * @opcode 0x7e
4709 * @oppfx 0xf3
4710 * @opcpuid sse2
4711 * @opgroup og_sse2_pcksclr_datamove
4712 * @opxcpttype none
4713 * @optest op1=1 op2=2 -> op1=2
4714 * @optest op1=0 op2=-42 -> op1=-42
4715 */
4716FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4717{
4718 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4721 {
4722 /*
4723 * Register, register.
4724 */
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4726 IEM_MC_BEGIN(0, 2);
4727 IEM_MC_LOCAL(uint64_t, uSrc);
4728
4729 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4730 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4731
4732 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4733 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4734
4735 IEM_MC_ADVANCE_RIP();
4736 IEM_MC_END();
4737 }
4738 else
4739 {
4740 /*
4741 * Memory, register.
4742 */
4743 IEM_MC_BEGIN(0, 2);
4744 IEM_MC_LOCAL(uint64_t, uSrc);
4745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4746
4747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4749 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4750 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4751
4752 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4753 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4754
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761/* Opcode 0xf2 0x0f 0x7e - invalid */
4762
4763
4764/** Opcode 0x0f 0x7f - movq Qq, Pq */
4765FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4766{
4767 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4769 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4770 {
4771 /*
4772 * Register, register.
4773 */
4774 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4775 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4777 IEM_MC_BEGIN(0, 1);
4778 IEM_MC_LOCAL(uint64_t, u64Tmp);
4779 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4780 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4781 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4782 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4783 IEM_MC_ADVANCE_RIP();
4784 IEM_MC_END();
4785 }
4786 else
4787 {
4788 /*
4789 * Register, memory.
4790 */
4791 IEM_MC_BEGIN(0, 2);
4792 IEM_MC_LOCAL(uint64_t, u64Tmp);
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4794
4795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4797 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4798 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4799
4800 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4801 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4802
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 }
4806 return VINF_SUCCESS;
4807}
4808
4809/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4810FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4811{
4812 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4815 {
4816 /*
4817 * Register, register.
4818 */
4819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4820 IEM_MC_BEGIN(0, 0);
4821 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4822 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4823 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4824 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4825 IEM_MC_ADVANCE_RIP();
4826 IEM_MC_END();
4827 }
4828 else
4829 {
4830 /*
4831 * Register, memory.
4832 */
4833 IEM_MC_BEGIN(0, 2);
4834 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4836
4837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4840 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4841
4842 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4843 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4844
4845 IEM_MC_ADVANCE_RIP();
4846 IEM_MC_END();
4847 }
4848 return VINF_SUCCESS;
4849}
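/* The only difference from movdqu below is the store: movdqa uses
   IEM_MC_STORE_MEM_U128_ALIGN_SSE and so faults on a misaligned 16-byte
   operand, whereas movdqu performs a plain unaligned store. */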
4850
4851/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4852FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4853{
4854 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4857 {
4858 /*
4859 * Register, register.
4860 */
4861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4862 IEM_MC_BEGIN(0, 0);
4863 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4864 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4865 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4866 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4867 IEM_MC_ADVANCE_RIP();
4868 IEM_MC_END();
4869 }
4870 else
4871 {
4872 /*
4873 * Register, memory.
4874 */
4875 IEM_MC_BEGIN(0, 2);
4876 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4878
4879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4881 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4882 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4883
4884 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4885 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4886
4887 IEM_MC_ADVANCE_RIP();
4888 IEM_MC_END();
4889 }
4890 return VINF_SUCCESS;
4891}
4892
4893/* Opcode 0xf2 0x0f 0x7f - invalid */
4894
4895
4896
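/*
 * The long-form Jcc instructions (0x0f 0x80..0x8f) below all follow the same
 * pattern: fetch a signed 16-bit or 32-bit displacement according to the
 * effective operand size (with the default 64-bit handling in long mode) and
 * conditionally branch on the relevant EFLAGS test.
 */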
4897/** Opcode 0x0f 0x80. */
4898FNIEMOP_DEF(iemOp_jo_Jv)
4899{
4900 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4901 IEMOP_HLP_MIN_386();
4902 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4903 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4904 {
4905 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4907
4908 IEM_MC_BEGIN(0, 0);
4909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4910 IEM_MC_REL_JMP_S16(i16Imm);
4911 } IEM_MC_ELSE() {
4912 IEM_MC_ADVANCE_RIP();
4913 } IEM_MC_ENDIF();
4914 IEM_MC_END();
4915 }
4916 else
4917 {
4918 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4920
4921 IEM_MC_BEGIN(0, 0);
4922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4923 IEM_MC_REL_JMP_S32(i32Imm);
4924 } IEM_MC_ELSE() {
4925 IEM_MC_ADVANCE_RIP();
4926 } IEM_MC_ENDIF();
4927 IEM_MC_END();
4928 }
4929 return VINF_SUCCESS;
4930}
4931
4932
4933/** Opcode 0x0f 0x81. */
4934FNIEMOP_DEF(iemOp_jno_Jv)
4935{
4936 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4937 IEMOP_HLP_MIN_386();
4938 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4939 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4940 {
4941 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4943
4944 IEM_MC_BEGIN(0, 0);
4945 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4946 IEM_MC_ADVANCE_RIP();
4947 } IEM_MC_ELSE() {
4948 IEM_MC_REL_JMP_S16(i16Imm);
4949 } IEM_MC_ENDIF();
4950 IEM_MC_END();
4951 }
4952 else
4953 {
4954 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4956
4957 IEM_MC_BEGIN(0, 0);
4958 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4959 IEM_MC_ADVANCE_RIP();
4960 } IEM_MC_ELSE() {
4961 IEM_MC_REL_JMP_S32(i32Imm);
4962 } IEM_MC_ENDIF();
4963 IEM_MC_END();
4964 }
4965 return VINF_SUCCESS;
4966}
4967
4968
4969/** Opcode 0x0f 0x82. */
4970FNIEMOP_DEF(iemOp_jc_Jv)
4971{
4972 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4973 IEMOP_HLP_MIN_386();
4974 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4975 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4976 {
4977 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4979
4980 IEM_MC_BEGIN(0, 0);
4981 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4982 IEM_MC_REL_JMP_S16(i16Imm);
4983 } IEM_MC_ELSE() {
4984 IEM_MC_ADVANCE_RIP();
4985 } IEM_MC_ENDIF();
4986 IEM_MC_END();
4987 }
4988 else
4989 {
4990 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4992
4993 IEM_MC_BEGIN(0, 0);
4994 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4995 IEM_MC_REL_JMP_S32(i32Imm);
4996 } IEM_MC_ELSE() {
4997 IEM_MC_ADVANCE_RIP();
4998 } IEM_MC_ENDIF();
4999 IEM_MC_END();
5000 }
5001 return VINF_SUCCESS;
5002}
5003
5004
5005/** Opcode 0x0f 0x83. */
5006FNIEMOP_DEF(iemOp_jnc_Jv)
5007{
5008 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5009 IEMOP_HLP_MIN_386();
5010 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5011 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5012 {
5013 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5015
5016 IEM_MC_BEGIN(0, 0);
5017 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5018 IEM_MC_ADVANCE_RIP();
5019 } IEM_MC_ELSE() {
5020 IEM_MC_REL_JMP_S16(i16Imm);
5021 } IEM_MC_ENDIF();
5022 IEM_MC_END();
5023 }
5024 else
5025 {
5026 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5028
5029 IEM_MC_BEGIN(0, 0);
5030 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5031 IEM_MC_ADVANCE_RIP();
5032 } IEM_MC_ELSE() {
5033 IEM_MC_REL_JMP_S32(i32Imm);
5034 } IEM_MC_ENDIF();
5035 IEM_MC_END();
5036 }
5037 return VINF_SUCCESS;
5038}
5039
5040
5041/** Opcode 0x0f 0x84. */
5042FNIEMOP_DEF(iemOp_je_Jv)
5043{
5044 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5045 IEMOP_HLP_MIN_386();
5046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5047 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5048 {
5049 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5051
5052 IEM_MC_BEGIN(0, 0);
5053 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5054 IEM_MC_REL_JMP_S16(i16Imm);
5055 } IEM_MC_ELSE() {
5056 IEM_MC_ADVANCE_RIP();
5057 } IEM_MC_ENDIF();
5058 IEM_MC_END();
5059 }
5060 else
5061 {
5062 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5064
5065 IEM_MC_BEGIN(0, 0);
5066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5067 IEM_MC_REL_JMP_S32(i32Imm);
5068 } IEM_MC_ELSE() {
5069 IEM_MC_ADVANCE_RIP();
5070 } IEM_MC_ENDIF();
5071 IEM_MC_END();
5072 }
5073 return VINF_SUCCESS;
5074}
5075
5076
5077/** Opcode 0x0f 0x85. */
5078FNIEMOP_DEF(iemOp_jne_Jv)
5079{
5080 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5081 IEMOP_HLP_MIN_386();
5082 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5083 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5084 {
5085 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5087
5088 IEM_MC_BEGIN(0, 0);
5089 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5090 IEM_MC_ADVANCE_RIP();
5091 } IEM_MC_ELSE() {
5092 IEM_MC_REL_JMP_S16(i16Imm);
5093 } IEM_MC_ENDIF();
5094 IEM_MC_END();
5095 }
5096 else
5097 {
5098 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100
5101 IEM_MC_BEGIN(0, 0);
5102 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5103 IEM_MC_ADVANCE_RIP();
5104 } IEM_MC_ELSE() {
5105 IEM_MC_REL_JMP_S32(i32Imm);
5106 } IEM_MC_ENDIF();
5107 IEM_MC_END();
5108 }
5109 return VINF_SUCCESS;
5110}
5111
5112
5113/** Opcode 0x0f 0x86. */
5114FNIEMOP_DEF(iemOp_jbe_Jv)
5115{
5116 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5117 IEMOP_HLP_MIN_386();
5118 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5119 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5120 {
5121 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5123
5124 IEM_MC_BEGIN(0, 0);
5125 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5126 IEM_MC_REL_JMP_S16(i16Imm);
5127 } IEM_MC_ELSE() {
5128 IEM_MC_ADVANCE_RIP();
5129 } IEM_MC_ENDIF();
5130 IEM_MC_END();
5131 }
5132 else
5133 {
5134 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136
5137 IEM_MC_BEGIN(0, 0);
5138 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5139 IEM_MC_REL_JMP_S32(i32Imm);
5140 } IEM_MC_ELSE() {
5141 IEM_MC_ADVANCE_RIP();
5142 } IEM_MC_ENDIF();
5143 IEM_MC_END();
5144 }
5145 return VINF_SUCCESS;
5146}
5147
5148
5149/** Opcode 0x0f 0x87. */
5150FNIEMOP_DEF(iemOp_jnbe_Jv)
5151{
5152 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5153 IEMOP_HLP_MIN_386();
5154 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5155 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5156 {
5157 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159
5160 IEM_MC_BEGIN(0, 0);
5161 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5162 IEM_MC_ADVANCE_RIP();
5163 } IEM_MC_ELSE() {
5164 IEM_MC_REL_JMP_S16(i16Imm);
5165 } IEM_MC_ENDIF();
5166 IEM_MC_END();
5167 }
5168 else
5169 {
5170 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172
5173 IEM_MC_BEGIN(0, 0);
5174 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5175 IEM_MC_ADVANCE_RIP();
5176 } IEM_MC_ELSE() {
5177 IEM_MC_REL_JMP_S32(i32Imm);
5178 } IEM_MC_ENDIF();
5179 IEM_MC_END();
5180 }
5181 return VINF_SUCCESS;
5182}
5183
5184
5185/** Opcode 0x0f 0x88. */
5186FNIEMOP_DEF(iemOp_js_Jv)
5187{
5188 IEMOP_MNEMONIC(js_Jv, "js Jv");
5189 IEMOP_HLP_MIN_386();
5190 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5191 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5192 {
5193 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195
5196 IEM_MC_BEGIN(0, 0);
5197 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5198 IEM_MC_REL_JMP_S16(i16Imm);
5199 } IEM_MC_ELSE() {
5200 IEM_MC_ADVANCE_RIP();
5201 } IEM_MC_ENDIF();
5202 IEM_MC_END();
5203 }
5204 else
5205 {
5206 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5208
5209 IEM_MC_BEGIN(0, 0);
5210 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5211 IEM_MC_REL_JMP_S32(i32Imm);
5212 } IEM_MC_ELSE() {
5213 IEM_MC_ADVANCE_RIP();
5214 } IEM_MC_ENDIF();
5215 IEM_MC_END();
5216 }
5217 return VINF_SUCCESS;
5218}
5219
5220
5221/** Opcode 0x0f 0x89. */
5222FNIEMOP_DEF(iemOp_jns_Jv)
5223{
5224 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5225 IEMOP_HLP_MIN_386();
5226 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5227 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5228 {
5229 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5231
5232 IEM_MC_BEGIN(0, 0);
5233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5234 IEM_MC_ADVANCE_RIP();
5235 } IEM_MC_ELSE() {
5236 IEM_MC_REL_JMP_S16(i16Imm);
5237 } IEM_MC_ENDIF();
5238 IEM_MC_END();
5239 }
5240 else
5241 {
5242 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5244
5245 IEM_MC_BEGIN(0, 0);
5246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5247 IEM_MC_ADVANCE_RIP();
5248 } IEM_MC_ELSE() {
5249 IEM_MC_REL_JMP_S32(i32Imm);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_END();
5252 }
5253 return VINF_SUCCESS;
5254}
5255
5256
5257/** Opcode 0x0f 0x8a. */
5258FNIEMOP_DEF(iemOp_jp_Jv)
5259{
5260 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5261 IEMOP_HLP_MIN_386();
5262 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5263 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5264 {
5265 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5267
5268 IEM_MC_BEGIN(0, 0);
5269 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5270 IEM_MC_REL_JMP_S16(i16Imm);
5271 } IEM_MC_ELSE() {
5272 IEM_MC_ADVANCE_RIP();
5273 } IEM_MC_ENDIF();
5274 IEM_MC_END();
5275 }
5276 else
5277 {
5278 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5280
5281 IEM_MC_BEGIN(0, 0);
5282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5283 IEM_MC_REL_JMP_S32(i32Imm);
5284 } IEM_MC_ELSE() {
5285 IEM_MC_ADVANCE_RIP();
5286 } IEM_MC_ENDIF();
5287 IEM_MC_END();
5288 }
5289 return VINF_SUCCESS;
5290}
5291
5292
5293/** Opcode 0x0f 0x8b. */
5294FNIEMOP_DEF(iemOp_jnp_Jv)
5295{
5296 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5297 IEMOP_HLP_MIN_386();
5298 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5299 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5300 {
5301 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303
5304 IEM_MC_BEGIN(0, 0);
5305 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5306 IEM_MC_ADVANCE_RIP();
5307 } IEM_MC_ELSE() {
5308 IEM_MC_REL_JMP_S16(i16Imm);
5309 } IEM_MC_ENDIF();
5310 IEM_MC_END();
5311 }
5312 else
5313 {
5314 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5316
5317 IEM_MC_BEGIN(0, 0);
5318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5319 IEM_MC_ADVANCE_RIP();
5320 } IEM_MC_ELSE() {
5321 IEM_MC_REL_JMP_S32(i32Imm);
5322 } IEM_MC_ENDIF();
5323 IEM_MC_END();
5324 }
5325 return VINF_SUCCESS;
5326}
5327
5328
5329/** Opcode 0x0f 0x8c. */
5330FNIEMOP_DEF(iemOp_jl_Jv)
5331{
5332 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5333 IEMOP_HLP_MIN_386();
5334 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5335 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5336 {
5337 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5339
5340 IEM_MC_BEGIN(0, 0);
5341 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5342 IEM_MC_REL_JMP_S16(i16Imm);
5343 } IEM_MC_ELSE() {
5344 IEM_MC_ADVANCE_RIP();
5345 } IEM_MC_ENDIF();
5346 IEM_MC_END();
5347 }
5348 else
5349 {
5350 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5352
5353 IEM_MC_BEGIN(0, 0);
5354 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5355 IEM_MC_REL_JMP_S32(i32Imm);
5356 } IEM_MC_ELSE() {
5357 IEM_MC_ADVANCE_RIP();
5358 } IEM_MC_ENDIF();
5359 IEM_MC_END();
5360 }
5361 return VINF_SUCCESS;
5362}
5363
5364
5365/** Opcode 0x0f 0x8d. */
5366FNIEMOP_DEF(iemOp_jnl_Jv)
5367{
5368 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5369 IEMOP_HLP_MIN_386();
5370 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5371 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5372 {
5373 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5375
5376 IEM_MC_BEGIN(0, 0);
5377 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5378 IEM_MC_ADVANCE_RIP();
5379 } IEM_MC_ELSE() {
5380 IEM_MC_REL_JMP_S16(i16Imm);
5381 } IEM_MC_ENDIF();
5382 IEM_MC_END();
5383 }
5384 else
5385 {
5386 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5388
5389 IEM_MC_BEGIN(0, 0);
5390 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5391 IEM_MC_ADVANCE_RIP();
5392 } IEM_MC_ELSE() {
5393 IEM_MC_REL_JMP_S32(i32Imm);
5394 } IEM_MC_ENDIF();
5395 IEM_MC_END();
5396 }
5397 return VINF_SUCCESS;
5398}
5399
5400
5401/** Opcode 0x0f 0x8e. */
5402FNIEMOP_DEF(iemOp_jle_Jv)
5403{
5404 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5405 IEMOP_HLP_MIN_386();
5406 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5407 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5408 {
5409 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5411
5412 IEM_MC_BEGIN(0, 0);
5413 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5414 IEM_MC_REL_JMP_S16(i16Imm);
5415 } IEM_MC_ELSE() {
5416 IEM_MC_ADVANCE_RIP();
5417 } IEM_MC_ENDIF();
5418 IEM_MC_END();
5419 }
5420 else
5421 {
5422 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5424
5425 IEM_MC_BEGIN(0, 0);
5426 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5427 IEM_MC_REL_JMP_S32(i32Imm);
5428 } IEM_MC_ELSE() {
5429 IEM_MC_ADVANCE_RIP();
5430 } IEM_MC_ENDIF();
5431 IEM_MC_END();
5432 }
5433 return VINF_SUCCESS;
5434}
5435
5436
5437/** Opcode 0x0f 0x8f. */
5438FNIEMOP_DEF(iemOp_jnle_Jv)
5439{
5440 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5441 IEMOP_HLP_MIN_386();
5442 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5443 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5444 {
5445 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5447
5448 IEM_MC_BEGIN(0, 0);
5449 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5450 IEM_MC_ADVANCE_RIP();
5451 } IEM_MC_ELSE() {
5452 IEM_MC_REL_JMP_S16(i16Imm);
5453 } IEM_MC_ENDIF();
5454 IEM_MC_END();
5455 }
5456 else
5457 {
5458 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5460
5461 IEM_MC_BEGIN(0, 0);
5462 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5463 IEM_MC_ADVANCE_RIP();
5464 } IEM_MC_ELSE() {
5465 IEM_MC_REL_JMP_S32(i32Imm);
5466 } IEM_MC_ENDIF();
5467 IEM_MC_END();
5468 }
5469 return VINF_SUCCESS;
5470}
5471
5472
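/*
 * The SETcc instructions (0x0f 0x90..0x9f) below store a single byte, 1 when
 * the condition holds and 0 otherwise, to the Eb register or memory target;
 * the ModRM reg field is treated as unused (see the @todo notes).
 */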
5473/** Opcode 0x0f 0x90. */
5474FNIEMOP_DEF(iemOp_seto_Eb)
5475{
5476 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5477 IEMOP_HLP_MIN_386();
5478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5479
5480 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5481 * any way. AMD says it's "unused", whatever that means. We're
5482 * ignoring for now. */
5483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5484 {
5485 /* register target */
5486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5487 IEM_MC_BEGIN(0, 0);
5488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5489 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5490 } IEM_MC_ELSE() {
5491 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5492 } IEM_MC_ENDIF();
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 }
5496 else
5497 {
5498 /* memory target */
5499 IEM_MC_BEGIN(0, 1);
5500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5504 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5505 } IEM_MC_ELSE() {
5506 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5507 } IEM_MC_ENDIF();
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 }
5511 return VINF_SUCCESS;
5512}
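
/*
 * Illustrative sketch (not from the VirtualBox sources): iemOp_seto_Eb
 * above and the fifteen SETcc handlers that follow all share one shape -
 * evaluate a flag condition and store a constant 1 or 0 byte.  For the
 * single-mask conditions that shape reduces to the helper below (the
 * SF/OF comparisons of setl/setle combine flags as in the Jcc sketch
 * earlier); the name is made up for this example.
 */
#include <stdint.h>

static uint8_t exampleSetccByte(uint32_t fEfl, uint32_t fMask, int fStoreOneWhenSet)
{
    /* seto:  fMask = OF,      fStoreOneWhenSet = 1
     * setno: fMask = OF,      fStoreOneWhenSet = 0
     * setbe: fMask = CF | ZF, fStoreOneWhenSet = 1 (any bit counts), ... */
    return (uint8_t)(((fEfl & fMask) != 0) == (fStoreOneWhenSet != 0));
}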
5513
5514
5515/** Opcode 0x0f 0x91. */
5516FNIEMOP_DEF(iemOp_setno_Eb)
5517{
5518 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5519 IEMOP_HLP_MIN_386();
5520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5521
5522 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5523 * any way. AMD says it's "unused", whatever that means. We're
5524 * ignoring for now. */
5525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5526 {
5527 /* register target */
5528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5529 IEM_MC_BEGIN(0, 0);
5530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5531 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5532 } IEM_MC_ELSE() {
5533 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5534 } IEM_MC_ENDIF();
5535 IEM_MC_ADVANCE_RIP();
5536 IEM_MC_END();
5537 }
5538 else
5539 {
5540 /* memory target */
5541 IEM_MC_BEGIN(0, 1);
5542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5546 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5547 } IEM_MC_ELSE() {
5548 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5549 } IEM_MC_ENDIF();
5550 IEM_MC_ADVANCE_RIP();
5551 IEM_MC_END();
5552 }
5553 return VINF_SUCCESS;
5554}
5555
5556
5557/** Opcode 0x0f 0x92. */
5558FNIEMOP_DEF(iemOp_setc_Eb)
5559{
5560 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5561 IEMOP_HLP_MIN_386();
5562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5563
5564 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5565 * any way. AMD says it's "unused", whatever that means. We're
5566 * ignoring for now. */
5567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5568 {
5569 /* register target */
5570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5571 IEM_MC_BEGIN(0, 0);
5572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5573 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5574 } IEM_MC_ELSE() {
5575 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5576 } IEM_MC_ENDIF();
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 }
5580 else
5581 {
5582 /* memory target */
5583 IEM_MC_BEGIN(0, 1);
5584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5587 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5588 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5589 } IEM_MC_ELSE() {
5590 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5591 } IEM_MC_ENDIF();
5592 IEM_MC_ADVANCE_RIP();
5593 IEM_MC_END();
5594 }
5595 return VINF_SUCCESS;
5596}
5597
5598
5599/** Opcode 0x0f 0x93. */
5600FNIEMOP_DEF(iemOp_setnc_Eb)
5601{
5602 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5603 IEMOP_HLP_MIN_386();
5604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5605
5606 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5607 * any way. AMD says it's "unused", whatever that means. We're
5608 * ignoring for now. */
5609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5610 {
5611 /* register target */
5612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5613 IEM_MC_BEGIN(0, 0);
5614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5615 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5616 } IEM_MC_ELSE() {
5617 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5618 } IEM_MC_ENDIF();
5619 IEM_MC_ADVANCE_RIP();
5620 IEM_MC_END();
5621 }
5622 else
5623 {
5624 /* memory target */
5625 IEM_MC_BEGIN(0, 1);
5626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5629 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5630 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5631 } IEM_MC_ELSE() {
5632 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5633 } IEM_MC_ENDIF();
5634 IEM_MC_ADVANCE_RIP();
5635 IEM_MC_END();
5636 }
5637 return VINF_SUCCESS;
5638}
5639
5640
5641/** Opcode 0x0f 0x94. */
5642FNIEMOP_DEF(iemOp_sete_Eb)
5643{
5644 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5645 IEMOP_HLP_MIN_386();
5646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5647
5648 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5649 * any way. AMD says it's "unused", whatever that means. We're
5650 * ignoring for now. */
5651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5652 {
5653 /* register target */
5654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5655 IEM_MC_BEGIN(0, 0);
5656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5657 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 }
5664 else
5665 {
5666 /* memory target */
5667 IEM_MC_BEGIN(0, 1);
5668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5671 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5672 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5673 } IEM_MC_ELSE() {
5674 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5675 } IEM_MC_ENDIF();
5676 IEM_MC_ADVANCE_RIP();
5677 IEM_MC_END();
5678 }
5679 return VINF_SUCCESS;
5680}
5681
5682
5683/** Opcode 0x0f 0x95. */
5684FNIEMOP_DEF(iemOp_setne_Eb)
5685{
5686 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5687 IEMOP_HLP_MIN_386();
5688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5689
5690 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5691 * any way. AMD says it's "unused", whatever that means. We're
5692 * ignoring for now. */
5693 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5694 {
5695 /* register target */
5696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5697 IEM_MC_BEGIN(0, 0);
5698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5699 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5700 } IEM_MC_ELSE() {
5701 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5702 } IEM_MC_ENDIF();
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 }
5706 else
5707 {
5708 /* memory target */
5709 IEM_MC_BEGIN(0, 1);
5710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5713 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5714 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5715 } IEM_MC_ELSE() {
5716 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5717 } IEM_MC_ENDIF();
5718 IEM_MC_ADVANCE_RIP();
5719 IEM_MC_END();
5720 }
5721 return VINF_SUCCESS;
5722}
5723
5724
5725/** Opcode 0x0f 0x96. */
5726FNIEMOP_DEF(iemOp_setbe_Eb)
5727{
5728 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5729 IEMOP_HLP_MIN_386();
5730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5731
5732 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5733 * any way. AMD says it's "unused", whatever that means. We're
5734 * ignoring for now. */
5735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5736 {
5737 /* register target */
5738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5739 IEM_MC_BEGIN(0, 0);
5740 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5741 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5742 } IEM_MC_ELSE() {
5743 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5744 } IEM_MC_ENDIF();
5745 IEM_MC_ADVANCE_RIP();
5746 IEM_MC_END();
5747 }
5748 else
5749 {
5750 /* memory target */
5751 IEM_MC_BEGIN(0, 1);
5752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5756 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5757 } IEM_MC_ELSE() {
5758 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 } IEM_MC_ENDIF();
5760 IEM_MC_ADVANCE_RIP();
5761 IEM_MC_END();
5762 }
5763 return VINF_SUCCESS;
5764}
5765
5766
5767/** Opcode 0x0f 0x97. */
5768FNIEMOP_DEF(iemOp_setnbe_Eb)
5769{
5770 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5771 IEMOP_HLP_MIN_386();
5772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5773
5774 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5775 * any way. AMD says it's "unused", whatever that means. We're
5776 * ignoring for now. */
5777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5778 {
5779 /* register target */
5780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5781 IEM_MC_BEGIN(0, 0);
5782 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5783 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5784 } IEM_MC_ELSE() {
5785 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5786 } IEM_MC_ENDIF();
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 }
5790 else
5791 {
5792 /* memory target */
5793 IEM_MC_BEGIN(0, 1);
5794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5797 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5798 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5799 } IEM_MC_ELSE() {
5800 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5801 } IEM_MC_ENDIF();
5802 IEM_MC_ADVANCE_RIP();
5803 IEM_MC_END();
5804 }
5805 return VINF_SUCCESS;
5806}
5807
5808
5809/** Opcode 0x0f 0x98. */
5810FNIEMOP_DEF(iemOp_sets_Eb)
5811{
5812 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5813 IEMOP_HLP_MIN_386();
5814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5815
5816 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5817 * any way. AMD says it's "unused", whatever that means. We're
5818 * ignoring for now. */
5819 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5820 {
5821 /* register target */
5822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5823 IEM_MC_BEGIN(0, 0);
5824 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5825 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5826 } IEM_MC_ELSE() {
5827 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5828 } IEM_MC_ENDIF();
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 }
5832 else
5833 {
5834 /* memory target */
5835 IEM_MC_BEGIN(0, 1);
5836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5840 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5841 } IEM_MC_ELSE() {
5842 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5843 } IEM_MC_ENDIF();
5844 IEM_MC_ADVANCE_RIP();
5845 IEM_MC_END();
5846 }
5847 return VINF_SUCCESS;
5848}
5849
5850
5851/** Opcode 0x0f 0x99. */
5852FNIEMOP_DEF(iemOp_setns_Eb)
5853{
5854 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5855 IEMOP_HLP_MIN_386();
5856 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5857
5858 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5859 * any way. AMD says it's "unused", whatever that means. We're
5860 * ignoring for now. */
5861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5862 {
5863 /* register target */
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865 IEM_MC_BEGIN(0, 0);
5866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5867 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5868 } IEM_MC_ELSE() {
5869 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5870 } IEM_MC_ENDIF();
5871 IEM_MC_ADVANCE_RIP();
5872 IEM_MC_END();
5873 }
5874 else
5875 {
5876 /* memory target */
5877 IEM_MC_BEGIN(0, 1);
5878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5882 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5883 } IEM_MC_ELSE() {
5884 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5885 } IEM_MC_ENDIF();
5886 IEM_MC_ADVANCE_RIP();
5887 IEM_MC_END();
5888 }
5889 return VINF_SUCCESS;
5890}
5891
5892
5893/** Opcode 0x0f 0x9a. */
5894FNIEMOP_DEF(iemOp_setp_Eb)
5895{
5896 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5897 IEMOP_HLP_MIN_386();
5898 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5899
5900 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5901 * any way. AMD says it's "unused", whatever that means. We're
5902 * ignoring for now. */
5903 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5904 {
5905 /* register target */
5906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5907 IEM_MC_BEGIN(0, 0);
5908 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5909 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5910 } IEM_MC_ELSE() {
5911 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5912 } IEM_MC_ENDIF();
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 else
5917 {
5918 /* memory target */
5919 IEM_MC_BEGIN(0, 1);
5920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5923 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5924 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5925 } IEM_MC_ELSE() {
5926 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5927 } IEM_MC_ENDIF();
5928 IEM_MC_ADVANCE_RIP();
5929 IEM_MC_END();
5930 }
5931 return VINF_SUCCESS;
5932}
5933
5934
5935/** Opcode 0x0f 0x9b. */
5936FNIEMOP_DEF(iemOp_setnp_Eb)
5937{
5938 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5939 IEMOP_HLP_MIN_386();
5940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5941
5942 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5943 * any way. AMD says it's "unused", whatever that means. We're
5944 * ignoring for now. */
5945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5946 {
5947 /* register target */
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949 IEM_MC_BEGIN(0, 0);
5950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5951 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5952 } IEM_MC_ELSE() {
5953 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5954 } IEM_MC_ENDIF();
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 }
5958 else
5959 {
5960 /* memory target */
5961 IEM_MC_BEGIN(0, 1);
5962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5966 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5967 } IEM_MC_ELSE() {
5968 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5969 } IEM_MC_ENDIF();
5970 IEM_MC_ADVANCE_RIP();
5971 IEM_MC_END();
5972 }
5973 return VINF_SUCCESS;
5974}
5975
5976
5977/** Opcode 0x0f 0x9c. */
5978FNIEMOP_DEF(iemOp_setl_Eb)
5979{
5980 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5981 IEMOP_HLP_MIN_386();
5982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5983
5984 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5985 * any way. AMD says it's "unused", whatever that means. We're
5986 * ignoring for now. */
5987 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5988 {
5989 /* register target */
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 IEM_MC_BEGIN(0, 0);
5992 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5993 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5994 } IEM_MC_ELSE() {
5995 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5996 } IEM_MC_ENDIF();
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 }
6000 else
6001 {
6002 /* memory target */
6003 IEM_MC_BEGIN(0, 1);
6004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6008 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6009 } IEM_MC_ELSE() {
6010 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6011 } IEM_MC_ENDIF();
6012 IEM_MC_ADVANCE_RIP();
6013 IEM_MC_END();
6014 }
6015 return VINF_SUCCESS;
6016}
6017
6018
6019/** Opcode 0x0f 0x9d. */
6020FNIEMOP_DEF(iemOp_setnl_Eb)
6021{
6022 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6023 IEMOP_HLP_MIN_386();
6024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6025
6026 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6027 * any way. AMD says it's "unused", whatever that means. We're
6028 * ignoring for now. */
6029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6030 {
6031 /* register target */
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033 IEM_MC_BEGIN(0, 0);
6034 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6035 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6036 } IEM_MC_ELSE() {
6037 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6038 } IEM_MC_ENDIF();
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 }
6042 else
6043 {
6044 /* memory target */
6045 IEM_MC_BEGIN(0, 1);
6046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6049 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6050 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6051 } IEM_MC_ELSE() {
6052 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6053 } IEM_MC_ENDIF();
6054 IEM_MC_ADVANCE_RIP();
6055 IEM_MC_END();
6056 }
6057 return VINF_SUCCESS;
6058}
6059
6060
6061/** Opcode 0x0f 0x9e. */
6062FNIEMOP_DEF(iemOp_setle_Eb)
6063{
6064 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6065 IEMOP_HLP_MIN_386();
6066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6067
6068 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6069 * any way. AMD says it's "unused", whatever that means. We're
6070 * ignoring for now. */
6071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6072 {
6073 /* register target */
6074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6075 IEM_MC_BEGIN(0, 0);
6076 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6077 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6078 } IEM_MC_ELSE() {
6079 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6080 } IEM_MC_ENDIF();
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 }
6084 else
6085 {
6086 /* memory target */
6087 IEM_MC_BEGIN(0, 1);
6088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6091 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6092 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6093 } IEM_MC_ELSE() {
6094 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6095 } IEM_MC_ENDIF();
6096 IEM_MC_ADVANCE_RIP();
6097 IEM_MC_END();
6098 }
6099 return VINF_SUCCESS;
6100}
6101
6102
6103/** Opcode 0x0f 0x9f. */
6104FNIEMOP_DEF(iemOp_setnle_Eb)
6105{
6106 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6107 IEMOP_HLP_MIN_386();
6108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6109
6110 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6111 * any way. AMD says it's "unused", whatever that means. We're
6112 * ignoring for now. */
6113 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6114 {
6115 /* register target */
6116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6117 IEM_MC_BEGIN(0, 0);
6118 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6119 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6120 } IEM_MC_ELSE() {
6121 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6122 } IEM_MC_ENDIF();
6123 IEM_MC_ADVANCE_RIP();
6124 IEM_MC_END();
6125 }
6126 else
6127 {
6128 /* memory target */
6129 IEM_MC_BEGIN(0, 1);
6130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6133 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6134 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6135 } IEM_MC_ELSE() {
6136 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6137 } IEM_MC_ENDIF();
6138 IEM_MC_ADVANCE_RIP();
6139 IEM_MC_END();
6140 }
6141 return VINF_SUCCESS;
6142}
6143
6144
6145/**
6146 * Common 'push segment-register' helper.
6147 */
6148FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6149{
6150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6151 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only FS and GS are pushable in 64-bit mode */
6152 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6153
6154 switch (pVCpu->iem.s.enmEffOpSize)
6155 {
6156 case IEMMODE_16BIT:
6157 IEM_MC_BEGIN(0, 1);
6158 IEM_MC_LOCAL(uint16_t, u16Value);
6159 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6160 IEM_MC_PUSH_U16(u16Value);
6161 IEM_MC_ADVANCE_RIP();
6162 IEM_MC_END();
6163 break;
6164
6165 case IEMMODE_32BIT:
6166 IEM_MC_BEGIN(0, 1);
6167 IEM_MC_LOCAL(uint32_t, u32Value);
6168 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6169 IEM_MC_PUSH_U32_SREG(u32Value);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 break;
6173
6174 case IEMMODE_64BIT:
6175 IEM_MC_BEGIN(0, 1);
6176 IEM_MC_LOCAL(uint64_t, u64Value);
6177 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6178 IEM_MC_PUSH_U64(u64Value);
6179 IEM_MC_ADVANCE_RIP();
6180 IEM_MC_END();
6181 break;
6182 }
6183
6184 return VINF_SUCCESS;
6185}
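
/*
 * Illustrative sketch (not from the VirtualBox sources): why the 32-bit
 * case above uses the dedicated IEM_MC_PUSH_U32_SREG.  With a 32-bit
 * operand size, recent Intel CPUs are documented to write a pushed
 * segment selector with a 16-bit store, leaving the upper word of the
 * stack slot unmodified, so a plain zero-extending 32-bit push would be
 * wrong.  The function name is made up for this example.
 */
#include <stdint.h>
#include <string.h>

static void examplePushSRegOpSize32(uint8_t *pbStackSlot /* rsp already dropped by 4 */,
                                    uint16_t uSel)
{
    /* Only bytes 0-1 of the 4-byte slot are written; bytes 2-3 keep
       whatever was on the stack before. */
    memcpy(pbStackSlot, &uSel, sizeof(uSel));
}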
6186
6187
6188/** Opcode 0x0f 0xa0. */
6189FNIEMOP_DEF(iemOp_push_fs)
6190{
6191 IEMOP_MNEMONIC(push_fs, "push fs");
6192 IEMOP_HLP_MIN_386();
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6194 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6195}
6196
6197
6198/** Opcode 0x0f 0xa1. */
6199FNIEMOP_DEF(iemOp_pop_fs)
6200{
6201 IEMOP_MNEMONIC(pop_fs, "pop fs");
6202 IEMOP_HLP_MIN_386();
6203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6204 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6205}
6206
6207
6208/** Opcode 0x0f 0xa2. */
6209FNIEMOP_DEF(iemOp_cpuid)
6210{
6211 IEMOP_MNEMONIC(cpuid, "cpuid");
6212 IEMOP_HLP_MIN_486(); /* not all 486es. */
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6215}
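
/*
 * Illustrative sketch (not from the VirtualBox sources): the host-side
 * view of the instruction deferred to iemCImpl_cpuid above, using
 * GCC/clang's <cpuid.h> wrapper - leaf 0 returns the vendor string in
 * EBX:EDX:ECX.  The function name is made up for this example.
 */
#include <cpuid.h>
#include <stdio.h>
#include <string.h>

static void examplePrintCpuVendor(void)
{
    unsigned uEax, uEbx, uEcx, uEdx;
    if (__get_cpuid(0, &uEax, &uEbx, &uEcx, &uEdx))
    {
        char szVendor[13];
        memcpy(&szVendor[0], &uEbx, 4);
        memcpy(&szVendor[4], &uEdx, 4);
        memcpy(&szVendor[8], &uEcx, 4);
        szVendor[12] = '\0';
        printf("%s\n", szVendor); /* e.g. "GenuineIntel" or "AuthenticAMD" */
    }
}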
6216
6217
6218/**
6219 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6220 * iemOp_bts_Ev_Gv.
6221 */
6222FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6223{
6224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6225 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6226
6227 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6228 {
6229 /* register destination. */
6230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6231 switch (pVCpu->iem.s.enmEffOpSize)
6232 {
6233 case IEMMODE_16BIT:
6234 IEM_MC_BEGIN(3, 0);
6235 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6236 IEM_MC_ARG(uint16_t, u16Src, 1);
6237 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6238
6239 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6240 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6241 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6242 IEM_MC_REF_EFLAGS(pEFlags);
6243 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6244
6245 IEM_MC_ADVANCE_RIP();
6246 IEM_MC_END();
6247 return VINF_SUCCESS;
6248
6249 case IEMMODE_32BIT:
6250 IEM_MC_BEGIN(3, 0);
6251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6252 IEM_MC_ARG(uint32_t, u32Src, 1);
6253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6254
6255 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6256 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6257 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6258 IEM_MC_REF_EFLAGS(pEFlags);
6259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6260
6261 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6262 IEM_MC_ADVANCE_RIP();
6263 IEM_MC_END();
6264 return VINF_SUCCESS;
6265
6266 case IEMMODE_64BIT:
6267 IEM_MC_BEGIN(3, 0);
6268 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6269 IEM_MC_ARG(uint64_t, u64Src, 1);
6270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6271
6272 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6273 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6274 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6275 IEM_MC_REF_EFLAGS(pEFlags);
6276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6277
6278 IEM_MC_ADVANCE_RIP();
6279 IEM_MC_END();
6280 return VINF_SUCCESS;
6281
6282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6283 }
6284 }
6285 else
6286 {
6287 /* memory destination. */
6288
6289 uint32_t fAccess;
6290 if (pImpl->pfnLockedU16)
6291 fAccess = IEM_ACCESS_DATA_RW;
6292 else /* BT */
6293 fAccess = IEM_ACCESS_DATA_R;
6294
6295 /** @todo test negative bit offsets! */
6296 switch (pVCpu->iem.s.enmEffOpSize)
6297 {
6298 case IEMMODE_16BIT:
6299 IEM_MC_BEGIN(3, 2);
6300 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6301 IEM_MC_ARG(uint16_t, u16Src, 1);
6302 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6304 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6305
6306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6307 if (pImpl->pfnLockedU16)
6308 IEMOP_HLP_DONE_DECODING();
6309 else
6310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6311 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6312 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6313 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6314 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6315 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6316 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6317 IEM_MC_FETCH_EFLAGS(EFlags);
6318
6319 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6320 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6321 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6322 else
6323 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6324 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6325
6326 IEM_MC_COMMIT_EFLAGS(EFlags);
6327 IEM_MC_ADVANCE_RIP();
6328 IEM_MC_END();
6329 return VINF_SUCCESS;
6330
6331 case IEMMODE_32BIT:
6332 IEM_MC_BEGIN(3, 2);
6333 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6334 IEM_MC_ARG(uint32_t, u32Src, 1);
6335 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6337 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6338
6339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6340 if (pImpl->pfnLockedU16)
6341 IEMOP_HLP_DONE_DECODING();
6342 else
6343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6344 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6345 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6346 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6347 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6348 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6349 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6350 IEM_MC_FETCH_EFLAGS(EFlags);
6351
6352 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6353 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6354 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6355 else
6356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6357 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6358
6359 IEM_MC_COMMIT_EFLAGS(EFlags);
6360 IEM_MC_ADVANCE_RIP();
6361 IEM_MC_END();
6362 return VINF_SUCCESS;
6363
6364 case IEMMODE_64BIT:
6365 IEM_MC_BEGIN(3, 2);
6366 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6367 IEM_MC_ARG(uint64_t, u64Src, 1);
6368 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6370 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6371
6372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6373 if (pImpl->pfnLockedU16)
6374 IEMOP_HLP_DONE_DECODING();
6375 else
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6378 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6379 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6380 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6381 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6382 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6383 IEM_MC_FETCH_EFLAGS(EFlags);
6384
6385 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6386 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6387 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6388 else
6389 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6390 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6391
6392 IEM_MC_COMMIT_EFLAGS(EFlags);
6393 IEM_MC_ADVANCE_RIP();
6394 IEM_MC_END();
6395 return VINF_SUCCESS;
6396
6397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6398 }
6399 }
6400}
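
/*
 * Illustrative sketch (not from the VirtualBox sources): what the
 * SAR/SHL/ADD_LOCAL_TO_EFF_ADDR sequence in the memory path above
 * computes.  With a register bit offset, BT/BTS/BTR/BTC treat the
 * offset as signed and may address memory well outside the operand
 * itself.  64-bit case shown; assumes arithmetic right shift for
 * negative values, as on all mainstream compilers.
 */
#include <stdint.h>

static uint64_t exampleBtEffAddr(uint64_t GCPtrEff, int64_t i64BitOfs, unsigned *piBit)
{
    GCPtrEff += (uint64_t)((i64BitOfs >> 6) * 8); /* signed qword index -> byte displacement */
    *piBit    = (unsigned)(i64BitOfs & 63);       /* bit number within that qword */
    return GCPtrEff;
}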
6401
6402
6403/** Opcode 0x0f 0xa3. */
6404FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6405{
6406 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6407 IEMOP_HLP_MIN_386();
6408 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6409}
6410
6411
6412/**
6413 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6414 */
6415FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6416{
6417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6418 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6419
6420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6421 {
6422 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6424
6425 switch (pVCpu->iem.s.enmEffOpSize)
6426 {
6427 case IEMMODE_16BIT:
6428 IEM_MC_BEGIN(4, 0);
6429 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6430 IEM_MC_ARG(uint16_t, u16Src, 1);
6431 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6432 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6433
6434 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6435 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6436 IEM_MC_REF_EFLAGS(pEFlags);
6437 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6438
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 case IEMMODE_32BIT:
6444 IEM_MC_BEGIN(4, 0);
6445 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6446 IEM_MC_ARG(uint32_t, u32Src, 1);
6447 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6448 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6449
6450 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6451 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6452 IEM_MC_REF_EFLAGS(pEFlags);
6453 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6454
6455 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6456 IEM_MC_ADVANCE_RIP();
6457 IEM_MC_END();
6458 return VINF_SUCCESS;
6459
6460 case IEMMODE_64BIT:
6461 IEM_MC_BEGIN(4, 0);
6462 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6463 IEM_MC_ARG(uint64_t, u64Src, 1);
6464 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6465 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6466
6467 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6468 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6469 IEM_MC_REF_EFLAGS(pEFlags);
6470 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6471
6472 IEM_MC_ADVANCE_RIP();
6473 IEM_MC_END();
6474 return VINF_SUCCESS;
6475
6476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6477 }
6478 }
6479 else
6480 {
6481 switch (pVCpu->iem.s.enmEffOpSize)
6482 {
6483 case IEMMODE_16BIT:
6484 IEM_MC_BEGIN(4, 2);
6485 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6486 IEM_MC_ARG(uint16_t, u16Src, 1);
6487 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6488 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6490
6491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6493 IEM_MC_ASSIGN(cShiftArg, cShift);
6494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6495 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6496 IEM_MC_FETCH_EFLAGS(EFlags);
6497 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6498 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6499
6500 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6501 IEM_MC_COMMIT_EFLAGS(EFlags);
6502 IEM_MC_ADVANCE_RIP();
6503 IEM_MC_END();
6504 return VINF_SUCCESS;
6505
6506 case IEMMODE_32BIT:
6507 IEM_MC_BEGIN(4, 2);
6508 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6509 IEM_MC_ARG(uint32_t, u32Src, 1);
6510 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6511 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6513
6514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6515 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6516 IEM_MC_ASSIGN(cShiftArg, cShift);
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6518 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6519 IEM_MC_FETCH_EFLAGS(EFlags);
6520 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6521 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6522
6523 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6524 IEM_MC_COMMIT_EFLAGS(EFlags);
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 return VINF_SUCCESS;
6528
6529 case IEMMODE_64BIT:
6530 IEM_MC_BEGIN(4, 2);
6531 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6532 IEM_MC_ARG(uint64_t, u64Src, 1);
6533 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6536
6537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6538 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6539 IEM_MC_ASSIGN(cShiftArg, cShift);
6540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6541 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6542 IEM_MC_FETCH_EFLAGS(EFlags);
6543 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6544 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6545
6546 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6547 IEM_MC_COMMIT_EFLAGS(EFlags);
6548 IEM_MC_ADVANCE_RIP();
6549 IEM_MC_END();
6550 return VINF_SUCCESS;
6551
6552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6553 }
6554 }
6555}
6556
6557
6558/**
6559 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6560 */
6561FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6562{
6563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6564 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6565
6566 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6567 {
6568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6569
6570 switch (pVCpu->iem.s.enmEffOpSize)
6571 {
6572 case IEMMODE_16BIT:
6573 IEM_MC_BEGIN(4, 0);
6574 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6575 IEM_MC_ARG(uint16_t, u16Src, 1);
6576 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6577 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6578
6579 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6580 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6581 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6582 IEM_MC_REF_EFLAGS(pEFlags);
6583 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6584
6585 IEM_MC_ADVANCE_RIP();
6586 IEM_MC_END();
6587 return VINF_SUCCESS;
6588
6589 case IEMMODE_32BIT:
6590 IEM_MC_BEGIN(4, 0);
6591 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6592 IEM_MC_ARG(uint32_t, u32Src, 1);
6593 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6594 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6595
6596 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6597 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6598 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6599 IEM_MC_REF_EFLAGS(pEFlags);
6600 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6601
6602 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6603 IEM_MC_ADVANCE_RIP();
6604 IEM_MC_END();
6605 return VINF_SUCCESS;
6606
6607 case IEMMODE_64BIT:
6608 IEM_MC_BEGIN(4, 0);
6609 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6610 IEM_MC_ARG(uint64_t, u64Src, 1);
6611 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6612 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6613
6614 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6615 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6616 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6617 IEM_MC_REF_EFLAGS(pEFlags);
6618 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6619
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 else
6628 {
6629 switch (pVCpu->iem.s.enmEffOpSize)
6630 {
6631 case IEMMODE_16BIT:
6632 IEM_MC_BEGIN(4, 2);
6633 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6634 IEM_MC_ARG(uint16_t, u16Src, 1);
6635 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6636 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6638
6639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6641 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6642 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6643 IEM_MC_FETCH_EFLAGS(EFlags);
6644 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6645 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6646
6647 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6648 IEM_MC_COMMIT_EFLAGS(EFlags);
6649 IEM_MC_ADVANCE_RIP();
6650 IEM_MC_END();
6651 return VINF_SUCCESS;
6652
6653 case IEMMODE_32BIT:
6654 IEM_MC_BEGIN(4, 2);
6655 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6656 IEM_MC_ARG(uint32_t, u32Src, 1);
6657 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6658 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6660
6661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6663 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6664 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6665 IEM_MC_FETCH_EFLAGS(EFlags);
6666 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6667 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6668
6669 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6670 IEM_MC_COMMIT_EFLAGS(EFlags);
6671 IEM_MC_ADVANCE_RIP();
6672 IEM_MC_END();
6673 return VINF_SUCCESS;
6674
6675 case IEMMODE_64BIT:
6676 IEM_MC_BEGIN(4, 2);
6677 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6678 IEM_MC_ARG(uint64_t, u64Src, 1);
6679 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6680 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6682
6683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6685 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6686 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6687 IEM_MC_FETCH_EFLAGS(EFlags);
6688 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6689 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6690
6691 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6692 IEM_MC_COMMIT_EFLAGS(EFlags);
6693 IEM_MC_ADVANCE_RIP();
6694 IEM_MC_END();
6695 return VINF_SUCCESS;
6696
6697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6698 }
6699 }
6700}
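
/*
 * Illustrative sketch (not from the VirtualBox sources): the core
 * operation both workers above dispatch to through pfnNormalU16/32/64.
 * Counts are masked mod 64 for 64-bit operands; for 16-bit operands a
 * count >= the width gives architecturally undefined results, which
 * this sketch ignores.  Names are made up for this example.
 */
#include <stdint.h>

static uint64_t exampleShld64(uint64_t uDst, uint64_t uSrc, uint8_t cShift)
{
    cShift &= 63;
    if (!cShift)
        return uDst;
    /* Bits leaving uDst on the left are replaced from the top of uSrc. */
    return (uDst << cShift) | (uSrc >> (64 - cShift));
}

static uint64_t exampleShrd64(uint64_t uDst, uint64_t uSrc, uint8_t cShift)
{
    cShift &= 63;
    if (!cShift)
        return uDst;
    /* Bits leaving uDst on the right are replaced from the bottom of uSrc. */
    return (uDst >> cShift) | (uSrc << (64 - cShift));
}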
6701
6702
6703
6704/** Opcode 0x0f 0xa4. */
6705FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6706{
6707 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6708 IEMOP_HLP_MIN_386();
6709 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6710}
6711
6712
6713/** Opcode 0x0f 0xa5. */
6714FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6715{
6716 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6717 IEMOP_HLP_MIN_386();
6718 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6719}
6720
6721
6722/** Opcode 0x0f 0xa8. */
6723FNIEMOP_DEF(iemOp_push_gs)
6724{
6725 IEMOP_MNEMONIC(push_gs, "push gs");
6726 IEMOP_HLP_MIN_386();
6727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6728 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6729}
6730
6731
6732/** Opcode 0x0f 0xa9. */
6733FNIEMOP_DEF(iemOp_pop_gs)
6734{
6735 IEMOP_MNEMONIC(pop_gs, "pop gs");
6736 IEMOP_HLP_MIN_386();
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6739}
6740
6741
6742/** Opcode 0x0f 0xaa. */
6743FNIEMOP_DEF(iemOp_rsm)
6744{
6745 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6746 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6748 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6749}
6750
6751
6752
6753/** Opcode 0x0f 0xab. */
6754FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6755{
6756 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6757 IEMOP_HLP_MIN_386();
6758 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6759}
6760
6761
6762/** Opcode 0x0f 0xac. */
6763FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6764{
6765 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6766 IEMOP_HLP_MIN_386();
6767 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6768}
6769
6770
6771/** Opcode 0x0f 0xad. */
6772FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6773{
6774 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6775 IEMOP_HLP_MIN_386();
6776 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6777}
6778
6779
6780/** Opcode 0x0f 0xae mem/0. */
6781FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6782{
6783 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6784 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6785 return IEMOP_RAISE_INVALID_OPCODE();
6786
6787 IEM_MC_BEGIN(3, 1);
6788 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6789 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6790 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6793 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6794 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6795 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6796 IEM_MC_END();
6797 return VINF_SUCCESS;
6798}
6799
6800
6801/** Opcode 0x0f 0xae mem/1. */
6802FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6803{
6804 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6805 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6806 return IEMOP_RAISE_INVALID_OPCODE();
6807
6808 IEM_MC_BEGIN(3, 1);
6809 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6810 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6811 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6814 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6815 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6816 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6817 IEM_MC_END();
6818 return VINF_SUCCESS;
6819}
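
/*
 * Illustrative sketch (not from the VirtualBox sources): the start of
 * the 512-byte image fxsave writes and fxrstor reads, per the Intel SDM
 * (64-bit, REX.W=1 format shown; the image continues with ST0-ST7 at
 * offset 32 and XMM0-XMM15 at offset 160).  The struct name is made up
 * for this example.
 */
#include <stdint.h>

typedef struct EXAMPLEFXHDR
{
    uint16_t u16Fcw;        /* 0x00: x87 control word */
    uint16_t u16Fsw;        /* 0x02: x87 status word */
    uint8_t  u8AbridgedFtw; /* 0x04: abridged tag word */
    uint8_t  u8Rsvd;        /* 0x05 */
    uint16_t u16Fop;        /* 0x06: last x87 opcode */
    uint64_t u64Fip;        /* 0x08: last instruction pointer */
    uint64_t u64Fdp;        /* 0x10: last data pointer */
    uint32_t u32Mxcsr;      /* 0x18: SSE control/status - see ldmxcsr/stmxcsr below */
    uint32_t u32MxcsrMask;  /* 0x1c: which MXCSR bits are writable */
} EXAMPLEFXHDR; /* 32 bytes; the register image follows */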
6820
6821
6822/**
6823 * @opmaps grp15
6824 * @opcode !11/2
6825 * @oppfx none
6826 * @opcpuid sse
6827 * @opgroup og_sse_mxcsrsm
6828 * @opxcpttype 5
6829 * @optest op1=0 -> mxcsr=0
6830 * @optest op1=0x2083 -> mxcsr=0x2083
6831 * @optest op1=0xfffffffe -> value.xcpt=0xd
6832 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6833 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6834 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6835 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6836 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6837 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6838 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6839 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6840 */
6841FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6842{
6843 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6844 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6845 return IEMOP_RAISE_INVALID_OPCODE();
6846
6847 IEM_MC_BEGIN(2, 0);
6848 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6849 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr writes MXCSR */
6853 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6854 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6855 IEM_MC_END();
6856 return VINF_SUCCESS;
6857}
6858
6859
6860/**
6861 * @opmaps grp15
6862 * @opcode !11/3
6863 * @oppfx none
6864 * @opcpuid sse
6865 * @opgroup og_sse_mxcsrsm
6866 * @opxcpttype 5
6867 * @optest mxcsr=0 -> op1=0
6868 * @optest mxcsr=0x2083 -> op1=0x2083
6869 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6870 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6871 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6872 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6873 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6874 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6875 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6876 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6877 */
6878FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6879{
6880 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6881 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6882 return IEMOP_RAISE_INVALID_OPCODE();
6883
6884 IEM_MC_BEGIN(2, 0);
6885 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6886 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6889 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6890 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6891 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6892 IEM_MC_END();
6893 return VINF_SUCCESS;
6894}
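
/*
 * Illustrative sketch (not from the VirtualBox sources): host-side
 * compilers expose exactly this ldmxcsr/stmxcsr pair through the SSE
 * intrinsics in <xmmintrin.h>.  Setting the rounding-control field
 * (bits 13:14) to 11b switches SSE arithmetic to round-toward-zero.
 */
#include <xmmintrin.h>

static void exampleSetSseRoundTowardZero(void)
{
    unsigned uMxcsr = _mm_getcsr();  /* compiles to stmxcsr */
    _mm_setcsr(uMxcsr | 0x6000u);    /* RC = 11b; compiles to ldmxcsr */
}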
6895
6896
6897/**
6898 * @opmaps grp15
6899 * @opcode !11/4
6900 * @oppfx none
6901 * @opcpuid xsave
6902 * @opgroup og_system
6903 * @opxcpttype none
6904 */
6905FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6906{
6907 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6908 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6909 return IEMOP_RAISE_INVALID_OPCODE();
6910
6911 IEM_MC_BEGIN(3, 0);
6912 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6913 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6914 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6917 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6918 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6919 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6920 IEM_MC_END();
6921 return VINF_SUCCESS;
6922}
6923
6924
6925/**
6926 * @opmaps grp15
6927 * @opcode !11/5
6928 * @oppfx none
6929 * @opcpuid xsave
6930 * @opgroup og_system
6931 * @opxcpttype none
6932 */
6933FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6934{
6935 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6936 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6937 return IEMOP_RAISE_INVALID_OPCODE();
6938
6939 IEM_MC_BEGIN(3, 0);
6940 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6941 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6942 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6945 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above */
6946 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6947 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6948 IEM_MC_END();
6949 return VINF_SUCCESS;
6950}
6951
6952/** Opcode 0x0f 0xae mem/6. */
6953FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6954
6955/**
6956 * @opmaps grp15
6957 * @opcode !11/7
6958 * @oppfx none
6959 * @opcpuid clfsh
6960 * @opgroup og_cachectl
6961 * @optest op1=1 ->
6962 */
6963FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6964{
6965 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6966 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6967 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6968
6969 IEM_MC_BEGIN(2, 0);
6970 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6971 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6975 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6976 IEM_MC_END();
6977 return VINF_SUCCESS;
6978}
6979
6980/**
6981 * @opmaps grp15
6982 * @opcode !11/7
6983 * @oppfx 0x66
6984 * @opcpuid clflushopt
6985 * @opgroup og_cachectl
6986 * @optest op1=1 ->
6987 */
6988FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6989{
6990 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6991 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6992 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6993
6994 IEM_MC_BEGIN(2, 0);
6995 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6996 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6999 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7000 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7001 IEM_MC_END();
7002 return VINF_SUCCESS;
7003}
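
/*
 * Illustrative sketch (not from the VirtualBox sources): the host-side
 * equivalent of the instruction emulated above, via the SSE2 intrinsic
 * from <emmintrin.h>.  The function name is made up for this example.
 */
#include <emmintrin.h>

static void exampleFlushCacheLine(void const *pv)
{
    _mm_clflush(pv); /* evicts the cache line containing pv from all levels */
}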
7004
7005
7006/** Opcode 0x0f 0xae 11b/5. */
7007FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7008{
7009 RT_NOREF_PV(bRm);
7010 IEMOP_MNEMONIC(lfence, "lfence");
7011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7012 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7013 return IEMOP_RAISE_INVALID_OPCODE();
7014
7015 IEM_MC_BEGIN(0, 0);
7016 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7017 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7018 else
7019 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7020 IEM_MC_ADVANCE_RIP();
7021 IEM_MC_END();
7022 return VINF_SUCCESS;
7023}
7024
7025
7026/** Opcode 0x0f 0xae 11b/6. */
7027FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7028{
7029 RT_NOREF_PV(bRm);
7030 IEMOP_MNEMONIC(mfence, "mfence");
7031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7032 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7033 return IEMOP_RAISE_INVALID_OPCODE();
7034
7035 IEM_MC_BEGIN(0, 0);
7036 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7037 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7038 else
7039 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7040 IEM_MC_ADVANCE_RIP();
7041 IEM_MC_END();
7042 return VINF_SUCCESS;
7043}
7044
7045
7046/** Opcode 0x0f 0xae 11b/7. */
7047FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7048{
7049 RT_NOREF_PV(bRm);
7050 IEMOP_MNEMONIC(sfence, "sfence");
7051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7052 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7053 return IEMOP_RAISE_INVALID_OPCODE();
7054
7055 IEM_MC_BEGIN(0, 0);
7056 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7057 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7058 else
7059 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7060 IEM_MC_ADVANCE_RIP();
7061 IEM_MC_END();
7062 return VINF_SUCCESS;
7063}
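
/*
 * Illustrative sketch (not from the VirtualBox sources): a plausible
 * shape for the iemAImpl_alt_mem_fence fallback used above on pre-SSE2
 * hosts - any LOCKed read-modify-write acts as a full memory barrier on
 * x86.  GCC/clang inline assembly; the function name is made up.
 */
#include <stdint.h>

static void exampleAltMemFence(void)
{
    uint32_t u32Tmp = 0;
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (u32Tmp) : : "memory", "cc");
}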
7064
7065
7066/** Opcode 0xf3 0x0f 0xae 11b/0. */
7067FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7068{
7069 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7071 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7072 {
7073 IEM_MC_BEGIN(1, 0);
7074 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7075 IEM_MC_ARG(uint64_t, u64Dst, 0);
7076 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7077 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7078 IEM_MC_ADVANCE_RIP();
7079 IEM_MC_END();
7080 }
7081 else
7082 {
7083 IEM_MC_BEGIN(1, 0);
7084 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7085 IEM_MC_ARG(uint32_t, u32Dst, 0);
7086 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7087 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7088 IEM_MC_ADVANCE_RIP();
7089 IEM_MC_END();
7090 }
7091 return VINF_SUCCESS;
7092}
7093
7094
7095/** Opcode 0xf3 0x0f 0xae 11b/1. */
7096FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7097{
7098 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7100 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7101 {
7102 IEM_MC_BEGIN(1, 0);
7103 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7104 IEM_MC_ARG(uint64_t, u64Dst, 0);
7105 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7106 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7107 IEM_MC_ADVANCE_RIP();
7108 IEM_MC_END();
7109 }
7110 else
7111 {
7112 IEM_MC_BEGIN(1, 0);
7113 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7114 IEM_MC_ARG(uint32_t, u32Dst, 0);
7115 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7117 IEM_MC_ADVANCE_RIP();
7118 IEM_MC_END();
7119 }
7120 return VINF_SUCCESS;
7121}
7122
7123
7124/** Opcode 0xf3 0x0f 0xae 11b/2. */
7125FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7126{
7127 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7129 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7130 {
7131 IEM_MC_BEGIN(1, 0);
7132 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7133 IEM_MC_ARG(uint64_t, u64Dst, 0);
7134 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7135 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7136 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7137 IEM_MC_ADVANCE_RIP();
7138 IEM_MC_END();
7139 }
7140 else
7141 {
7142 IEM_MC_BEGIN(1, 0);
7143 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7144 IEM_MC_ARG(uint32_t, u32Dst, 0);
7145 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7146 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7147 IEM_MC_ADVANCE_RIP();
7148 IEM_MC_END();
7149 }
7150 return VINF_SUCCESS;
7151}
7152
7153
7154/** Opcode 0xf3 0x0f 0xae 11b/3. */
7155FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7156{
7157 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7159 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7160 {
7161 IEM_MC_BEGIN(1, 0);
7162 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7163 IEM_MC_ARG(uint64_t, u64Dst, 0);
7164 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7165 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7166 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7167 IEM_MC_ADVANCE_RIP();
7168 IEM_MC_END();
7169 }
7170 else
7171 {
7172 IEM_MC_BEGIN(1, 0);
7173 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7174 IEM_MC_ARG(uint32_t, u32Dst, 0);
7175 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7176 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7177 IEM_MC_ADVANCE_RIP();
7178 IEM_MC_END();
7179 }
7180 return VINF_SUCCESS;
7181}
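
/*
 * Illustrative sketch (not from the VirtualBox sources): the four
 * handlers above emulate the user-mode FSGSBASE instructions, which
 * host compilers expose as intrinsics in <immintrin.h> (build with
 * -mfsgsbase; executing them still requires the OS to have set
 * CR4.FSGSBASE, otherwise they #UD - the same condition
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT checks for the guest).
 */
#include <immintrin.h>
#include <stdint.h>

static uint64_t exampleSwapFsBase(uint64_t uNewBase)
{
    uint64_t uOldBase = _readfsbase_u64(); /* rdfsbase */
    _writefsbase_u64(uNewBase);            /* wrfsbase */
    return uOldBase;
}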
7182
7183
7184/**
7185 * Group 15 jump table for register variant.
7186 */
7187IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7188{ /* pfx: none, 066h, 0f3h, 0f2h */
7189 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7190 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7191 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7192 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7193 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7194 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7195 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7196 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7197};
7198AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7199
7200
7201/**
7202 * Group 15 jump table for memory variant.
7203 */
7204IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7205{ /* pfx: none, 066h, 0f3h, 0f2h */
7206 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7207 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7208 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7209 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7210 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7211 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7212 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7213 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7214};
7215AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7216
7217
7218/** Opcode 0x0f 0xae. */
7219FNIEMOP_DEF(iemOp_Grp15)
7220{
7221 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7223 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7224 /* register, register */
7225 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7226 + pVCpu->iem.s.idxPrefix], bRm);
7227 /* memory, register */
7228 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7229 + pVCpu->iem.s.idxPrefix], bRm);
7230}
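
/*
 * Illustrative note (an addition): both group jump tables above are indexed
 * as reg * 4 + prefix, where reg is the ModR/M reg field and the prefix
 * column is 0 for none, 1 for 066h, 2 for 0f3h and 3 for 0f2h.  A worked
 * example for a hypothetical encoding:
 *
 *   uint8_t const bRm = 0xe8;  // mod=3, reg=5, rm=0
 *   // (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK yields 5, so
 *   // with no prefix (idxPrefix == 0) entry 5*4 + 0 is selected, i.e.
 *   // iemOp_Grp15_lfence in g_apfnGroup15RegReg.
 */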
7231
7232
7233/** Opcode 0x0f 0xaf. */
7234FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7235{
7236 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7237 IEMOP_HLP_MIN_386();
7238 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7239 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7240}
7241
7242
7243/** Opcode 0x0f 0xb0. */
7244FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7245{
7246 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7247 IEMOP_HLP_MIN_486();
7248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7249
7250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7251 {
7252 IEMOP_HLP_DONE_DECODING();
7253 IEM_MC_BEGIN(4, 0);
7254 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7255 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7256 IEM_MC_ARG(uint8_t, u8Src, 2);
7257 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7258
7259 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7260 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7261 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7262 IEM_MC_REF_EFLAGS(pEFlags);
7263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7264 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7265 else
7266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7267
7268 IEM_MC_ADVANCE_RIP();
7269 IEM_MC_END();
7270 }
7271 else
7272 {
7273 IEM_MC_BEGIN(4, 3);
7274 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7275 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7276 IEM_MC_ARG(uint8_t, u8Src, 2);
7277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7279 IEM_MC_LOCAL(uint8_t, u8Al);
7280
7281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7282 IEMOP_HLP_DONE_DECODING();
7283 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7284 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7285 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7286 IEM_MC_FETCH_EFLAGS(EFlags);
7287 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7288 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7289 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7290 else
7291 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7292
7293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7294 IEM_MC_COMMIT_EFLAGS(EFlags);
7295 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7296 IEM_MC_ADVANCE_RIP();
7297 IEM_MC_END();
7298 }
7299 return VINF_SUCCESS;
7300}
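
/*
 * Illustrative sketch (an addition, not the worker's actual code): the
 * accumulator compare-and-exchange performed by iemAImpl_cmpxchg_u8 comes
 * down to the following:
 *
 *   if (*pu8Dst == *pu8Al) {
 *       *pu8Dst = u8Src;      // equal: store the source, ZF=1
 *   } else {
 *       *pu8Al = *pu8Dst;     // not equal: load destination into AL, ZF=0
 *   }
 *   // The other EFLAGS are set as for a CMP of AL with the destination.
 */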
7301
7302/** Opcode 0x0f 0xb1. */
7303FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7304{
7305 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7306 IEMOP_HLP_MIN_486();
7307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7308
7309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7310 {
7311 IEMOP_HLP_DONE_DECODING();
7312 switch (pVCpu->iem.s.enmEffOpSize)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_BEGIN(4, 0);
7316 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7317 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7318 IEM_MC_ARG(uint16_t, u16Src, 2);
7319 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7320
7321 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7322 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7323 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7324 IEM_MC_REF_EFLAGS(pEFlags);
7325 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7326 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7327 else
7328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7329
7330 IEM_MC_ADVANCE_RIP();
7331 IEM_MC_END();
7332 return VINF_SUCCESS;
7333
7334 case IEMMODE_32BIT:
7335 IEM_MC_BEGIN(4, 0);
7336 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7337 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7338 IEM_MC_ARG(uint32_t, u32Src, 2);
7339 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7340
7341 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7342 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7343 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7344 IEM_MC_REF_EFLAGS(pEFlags);
7345 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7346 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7347 else
7348 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7349
7350 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7351 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7352 IEM_MC_ADVANCE_RIP();
7353 IEM_MC_END();
7354 return VINF_SUCCESS;
7355
7356 case IEMMODE_64BIT:
7357 IEM_MC_BEGIN(4, 0);
7358 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7359 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7360#ifdef RT_ARCH_X86
7361 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7362#else
7363 IEM_MC_ARG(uint64_t, u64Src, 2);
7364#endif
7365 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7366
7367 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7368 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7369 IEM_MC_REF_EFLAGS(pEFlags);
7370#ifdef RT_ARCH_X86
7371 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7372 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7373 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7374 else
7375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7376#else
7377 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7378 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7379 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7380 else
7381 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7382#endif
7383
7384 IEM_MC_ADVANCE_RIP();
7385 IEM_MC_END();
7386 return VINF_SUCCESS;
7387
7388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7389 }
7390 }
7391 else
7392 {
7393 switch (pVCpu->iem.s.enmEffOpSize)
7394 {
7395 case IEMMODE_16BIT:
7396 IEM_MC_BEGIN(4, 3);
7397 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7398 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7399 IEM_MC_ARG(uint16_t, u16Src, 2);
7400 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7402 IEM_MC_LOCAL(uint16_t, u16Ax);
7403
7404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7405 IEMOP_HLP_DONE_DECODING();
7406 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7407 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7408 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7409 IEM_MC_FETCH_EFLAGS(EFlags);
7410 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7412 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7413 else
7414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7415
7416 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7417 IEM_MC_COMMIT_EFLAGS(EFlags);
7418 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7419 IEM_MC_ADVANCE_RIP();
7420 IEM_MC_END();
7421 return VINF_SUCCESS;
7422
7423 case IEMMODE_32BIT:
7424 IEM_MC_BEGIN(4, 3);
7425 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7426 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7427 IEM_MC_ARG(uint32_t, u32Src, 2);
7428 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7430 IEM_MC_LOCAL(uint32_t, u32Eax);
7431
7432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7433 IEMOP_HLP_DONE_DECODING();
7434 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7435 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7436 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7437 IEM_MC_FETCH_EFLAGS(EFlags);
7438 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7439 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7440 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7441 else
7442 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7443
7444 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7445 IEM_MC_COMMIT_EFLAGS(EFlags);
7446 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7447 IEM_MC_ADVANCE_RIP();
7448 IEM_MC_END();
7449 return VINF_SUCCESS;
7450
7451 case IEMMODE_64BIT:
7452 IEM_MC_BEGIN(4, 3);
7453 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7454 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7455#ifdef RT_ARCH_X86
7456 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7457#else
7458 IEM_MC_ARG(uint64_t, u64Src, 2);
7459#endif
7460 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7462 IEM_MC_LOCAL(uint64_t, u64Rax);
7463
7464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7465 IEMOP_HLP_DONE_DECODING();
7466 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7467 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7468 IEM_MC_FETCH_EFLAGS(EFlags);
7469 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7470#ifdef RT_ARCH_X86
7471 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7472 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7473 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7474 else
7475 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7476#else
7477 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7478 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7479 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7480 else
7481 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7482#endif
7483
7484 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7485 IEM_MC_COMMIT_EFLAGS(EFlags);
7486 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7487 IEM_MC_ADVANCE_RIP();
7488 IEM_MC_END();
7489 return VINF_SUCCESS;
7490
7491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7492 }
7493 }
7494}
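
/*
 * Note (an addition; the rationale is inferred): the RT_ARCH_X86 special
 * casing above passes the 64-bit source operand by reference, presumably
 * because the 32-bit host assembly worker cannot conveniently take a 64-bit
 * value directly; the emulated operation is identical in both variants.
 */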
7495
7496
7497FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7498{
7499 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7500 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7501
7502 switch (pVCpu->iem.s.enmEffOpSize)
7503 {
7504 case IEMMODE_16BIT:
7505 IEM_MC_BEGIN(5, 1);
7506 IEM_MC_ARG(uint16_t, uSel, 0);
7507 IEM_MC_ARG(uint16_t, offSeg, 1);
7508 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7509 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7510 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7511 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7514 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7515 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7516 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7517 IEM_MC_END();
7518 return VINF_SUCCESS;
7519
7520 case IEMMODE_32BIT:
7521 IEM_MC_BEGIN(5, 1);
7522 IEM_MC_ARG(uint16_t, uSel, 0);
7523 IEM_MC_ARG(uint32_t, offSeg, 1);
7524 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7525 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7526 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7527 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7530 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7531 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7532 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7533 IEM_MC_END();
7534 return VINF_SUCCESS;
7535
7536 case IEMMODE_64BIT:
7537 IEM_MC_BEGIN(5, 1);
7538 IEM_MC_ARG(uint16_t, uSel, 0);
7539 IEM_MC_ARG(uint64_t, offSeg, 1);
7540 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7541 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7542 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7543 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7546 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7547 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7548 else
7549 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7550 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7551 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7552 IEM_MC_END();
7553 return VINF_SUCCESS;
7554
7555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7556 }
7557}
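
/*
 * Illustrative note (an addition): the far pointer fetched above is laid
 * out in memory as offset first, selector last.  For the 32-bit operand
 * size the IEM_MC_FETCH_MEM_U32 / IEM_MC_FETCH_MEM_U16_DISP pair is
 * equivalent to:
 *
 *   offSeg = *(uint32_t const *)pbMem;        // bytes 0..3: offset
 *   uSel   = *(uint16_t const *)(pbMem + 4);  // bytes 4..5: selector
 *
 * (pbMem being a hypothetical flat pointer to the effective address.)
 */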
7558
7559
7560/** Opcode 0x0f 0xb2. */
7561FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7562{
7563 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7564 IEMOP_HLP_MIN_386();
7565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7566 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7567 return IEMOP_RAISE_INVALID_OPCODE();
7568 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7569}
7570
7571
7572/** Opcode 0x0f 0xb3. */
7573FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7574{
7575 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7576 IEMOP_HLP_MIN_386();
7577 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7578}
7579
7580
7581/** Opcode 0x0f 0xb4. */
7582FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7583{
7584 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7585 IEMOP_HLP_MIN_386();
7586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7587 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7588 return IEMOP_RAISE_INVALID_OPCODE();
7589 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7590}
7591
7592
7593/** Opcode 0x0f 0xb5. */
7594FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7595{
7596 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7597 IEMOP_HLP_MIN_386();
7598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7600 return IEMOP_RAISE_INVALID_OPCODE();
7601 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7602}
7603
7604
7605/** Opcode 0x0f 0xb6. */
7606FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7607{
7608 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7609 IEMOP_HLP_MIN_386();
7610
7611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7612
7613 /*
7614 * If rm is denoting a register, no more instruction bytes.
7615 */
7616 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7617 {
7618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7619 switch (pVCpu->iem.s.enmEffOpSize)
7620 {
7621 case IEMMODE_16BIT:
7622 IEM_MC_BEGIN(0, 1);
7623 IEM_MC_LOCAL(uint16_t, u16Value);
7624 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7625 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7626 IEM_MC_ADVANCE_RIP();
7627 IEM_MC_END();
7628 return VINF_SUCCESS;
7629
7630 case IEMMODE_32BIT:
7631 IEM_MC_BEGIN(0, 1);
7632 IEM_MC_LOCAL(uint32_t, u32Value);
7633 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7634 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7635 IEM_MC_ADVANCE_RIP();
7636 IEM_MC_END();
7637 return VINF_SUCCESS;
7638
7639 case IEMMODE_64BIT:
7640 IEM_MC_BEGIN(0, 1);
7641 IEM_MC_LOCAL(uint64_t, u64Value);
7642 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7643 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7644 IEM_MC_ADVANCE_RIP();
7645 IEM_MC_END();
7646 return VINF_SUCCESS;
7647
7648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7649 }
7650 }
7651 else
7652 {
7653 /*
7654 * We're loading a register from memory.
7655 */
7656 switch (pVCpu->iem.s.enmEffOpSize)
7657 {
7658 case IEMMODE_16BIT:
7659 IEM_MC_BEGIN(0, 2);
7660 IEM_MC_LOCAL(uint16_t, u16Value);
7661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7664 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7665 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7666 IEM_MC_ADVANCE_RIP();
7667 IEM_MC_END();
7668 return VINF_SUCCESS;
7669
7670 case IEMMODE_32BIT:
7671 IEM_MC_BEGIN(0, 2);
7672 IEM_MC_LOCAL(uint32_t, u32Value);
7673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7677 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7678 IEM_MC_ADVANCE_RIP();
7679 IEM_MC_END();
7680 return VINF_SUCCESS;
7681
7682 case IEMMODE_64BIT:
7683 IEM_MC_BEGIN(0, 2);
7684 IEM_MC_LOCAL(uint64_t, u64Value);
7685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7689 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7690 IEM_MC_ADVANCE_RIP();
7691 IEM_MC_END();
7692 return VINF_SUCCESS;
7693
7694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7695 }
7696 }
7697}
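
/*
 * Worked example (an illustrative addition): with a 32-bit operand size,
 *
 *   movzx eax, byte ptr [mem]   ; [mem] = 0x80
 *
 * yields eax = 0x00000080; the byte is zero-extended, and in 64-bit mode
 * the 32-bit register store clears bits 63:32 of rax as usual.
 */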
7698
7699
7700/** Opcode 0x0f 0xb7. */
7701FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7702{
7703 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7704 IEMOP_HLP_MIN_386();
7705
7706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7707
7708 /** @todo Not entirely sure how the operand size prefix is handled here,
7709 * assuming that it will be ignored. Would be nice to have a few
7710 * tests for this. */
7711 /*
7712 * If rm is denoting a register, no more instruction bytes.
7713 */
7714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7715 {
7716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7717 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7718 {
7719 IEM_MC_BEGIN(0, 1);
7720 IEM_MC_LOCAL(uint32_t, u32Value);
7721 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7723 IEM_MC_ADVANCE_RIP();
7724 IEM_MC_END();
7725 }
7726 else
7727 {
7728 IEM_MC_BEGIN(0, 1);
7729 IEM_MC_LOCAL(uint64_t, u64Value);
7730 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7731 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7732 IEM_MC_ADVANCE_RIP();
7733 IEM_MC_END();
7734 }
7735 }
7736 else
7737 {
7738 /*
7739 * We're loading a register from memory.
7740 */
7741 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7742 {
7743 IEM_MC_BEGIN(0, 2);
7744 IEM_MC_LOCAL(uint32_t, u32Value);
7745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7749 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7750 IEM_MC_ADVANCE_RIP();
7751 IEM_MC_END();
7752 }
7753 else
7754 {
7755 IEM_MC_BEGIN(0, 2);
7756 IEM_MC_LOCAL(uint64_t, u64Value);
7757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7760 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7761 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7762 IEM_MC_ADVANCE_RIP();
7763 IEM_MC_END();
7764 }
7765 }
7766 return VINF_SUCCESS;
7767}
7768
7769
7770/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7771FNIEMOP_UD_STUB(iemOp_jmpe);
7772/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7773FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7774
7775
7776/**
7777 * @opcode 0xb9
7778 * @opinvalid intel-modrm
7779 * @optest ->
7780 */
7781FNIEMOP_DEF(iemOp_Grp10)
7782{
7783 /*
7784 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the
7785 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7786 */
7787 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7788 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7789 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7790}
7791
7792
7793/** Opcode 0x0f 0xba. */
7794FNIEMOP_DEF(iemOp_Grp8)
7795{
7796 IEMOP_HLP_MIN_386();
7797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7798 PCIEMOPBINSIZES pImpl;
7799 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7800 {
7801 case 0: case 1: case 2: case 3:
7802 /* Both AMD and Intel want full modr/m decoding and imm8. */
7803 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7804 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7805 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7806 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7807 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7809 }
7810 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7811
7812 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7813 {
7814 /* register destination. */
7815 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7817
7818 switch (pVCpu->iem.s.enmEffOpSize)
7819 {
7820 case IEMMODE_16BIT:
7821 IEM_MC_BEGIN(3, 0);
7822 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7823 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7824 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7825
7826 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7827 IEM_MC_REF_EFLAGS(pEFlags);
7828 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7829
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 return VINF_SUCCESS;
7833
7834 case IEMMODE_32BIT:
7835 IEM_MC_BEGIN(3, 0);
7836 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7837 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7838 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7839
7840 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7841 IEM_MC_REF_EFLAGS(pEFlags);
7842 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7843
7844 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7845 IEM_MC_ADVANCE_RIP();
7846 IEM_MC_END();
7847 return VINF_SUCCESS;
7848
7849 case IEMMODE_64BIT:
7850 IEM_MC_BEGIN(3, 0);
7851 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7852 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7853 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7854
7855 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7856 IEM_MC_REF_EFLAGS(pEFlags);
7857 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7858
7859 IEM_MC_ADVANCE_RIP();
7860 IEM_MC_END();
7861 return VINF_SUCCESS;
7862
7863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7864 }
7865 }
7866 else
7867 {
7868 /* memory destination. */
7869
7870 uint32_t fAccess;
7871 if (pImpl->pfnLockedU16)
7872 fAccess = IEM_ACCESS_DATA_RW;
7873 else /* BT */
7874 fAccess = IEM_ACCESS_DATA_R;
7875
7876 /** @todo test negative bit offsets! */
7877 switch (pVCpu->iem.s.enmEffOpSize)
7878 {
7879 case IEMMODE_16BIT:
7880 IEM_MC_BEGIN(3, 1);
7881 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7882 IEM_MC_ARG(uint16_t, u16Src, 1);
7883 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7885
7886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7887 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7888 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7889 if (pImpl->pfnLockedU16)
7890 IEMOP_HLP_DONE_DECODING();
7891 else
7892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7893 IEM_MC_FETCH_EFLAGS(EFlags);
7894 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7895 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7896 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7897 else
7898 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7899 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7900
7901 IEM_MC_COMMIT_EFLAGS(EFlags);
7902 IEM_MC_ADVANCE_RIP();
7903 IEM_MC_END();
7904 return VINF_SUCCESS;
7905
7906 case IEMMODE_32BIT:
7907 IEM_MC_BEGIN(3, 1);
7908 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7909 IEM_MC_ARG(uint32_t, u32Src, 1);
7910 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7912
7913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7914 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7915 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7916 if (pImpl->pfnLockedU16)
7917 IEMOP_HLP_DONE_DECODING();
7918 else
7919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7920 IEM_MC_FETCH_EFLAGS(EFlags);
7921 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7922 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7924 else
7925 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7926 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7927
7928 IEM_MC_COMMIT_EFLAGS(EFlags);
7929 IEM_MC_ADVANCE_RIP();
7930 IEM_MC_END();
7931 return VINF_SUCCESS;
7932
7933 case IEMMODE_64BIT:
7934 IEM_MC_BEGIN(3, 1);
7935 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7936 IEM_MC_ARG(uint64_t, u64Src, 1);
7937 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7939
7940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7941 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7942 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7943 if (pImpl->pfnLockedU16)
7944 IEMOP_HLP_DONE_DECODING();
7945 else
7946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7947 IEM_MC_FETCH_EFLAGS(EFlags);
7948 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7949 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7950 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7951 else
7952 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7953 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7954
7955 IEM_MC_COMMIT_EFLAGS(EFlags);
7956 IEM_MC_ADVANCE_RIP();
7957 IEM_MC_END();
7958 return VINF_SUCCESS;
7959
7960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7961 }
7962 }
7963}
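
/*
 * Worked example (an illustrative addition): the immediate bit offset is
 * masked to the operand width before use, so with a 16-bit operand
 *
 *   bt word ptr [mem], 21       ; u8Bit = 21
 *
 * tests bit 21 & 0x0f = 5 of the 16-bit destination; unlike the register
 * source forms, the immediate form never addresses beyond the operand.
 */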
7964
7965
7966/** Opcode 0x0f 0xbb. */
7967FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7968{
7969 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7970 IEMOP_HLP_MIN_386();
7971 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7972}
7973
7974
7975/** Opcode 0x0f 0xbc. */
7976FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7977{
7978 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7979 IEMOP_HLP_MIN_386();
7980 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7981 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7982}
7983
7984
7985/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7986FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7987
7988
7989/** Opcode 0x0f 0xbd. */
7990FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7991{
7992 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7993 IEMOP_HLP_MIN_386();
7994 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7995 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7996}
7997
7998
7999/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8000FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
8001
8002
8003/** Opcode 0x0f 0xbe. */
8004FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8005{
8006 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8007 IEMOP_HLP_MIN_386();
8008
8009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8010
8011 /*
8012 * If rm is denoting a register, no more instruction bytes.
8013 */
8014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8015 {
8016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8017 switch (pVCpu->iem.s.enmEffOpSize)
8018 {
8019 case IEMMODE_16BIT:
8020 IEM_MC_BEGIN(0, 1);
8021 IEM_MC_LOCAL(uint16_t, u16Value);
8022 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8023 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8024 IEM_MC_ADVANCE_RIP();
8025 IEM_MC_END();
8026 return VINF_SUCCESS;
8027
8028 case IEMMODE_32BIT:
8029 IEM_MC_BEGIN(0, 1);
8030 IEM_MC_LOCAL(uint32_t, u32Value);
8031 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8032 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8033 IEM_MC_ADVANCE_RIP();
8034 IEM_MC_END();
8035 return VINF_SUCCESS;
8036
8037 case IEMMODE_64BIT:
8038 IEM_MC_BEGIN(0, 1);
8039 IEM_MC_LOCAL(uint64_t, u64Value);
8040 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8041 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8042 IEM_MC_ADVANCE_RIP();
8043 IEM_MC_END();
8044 return VINF_SUCCESS;
8045
8046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8047 }
8048 }
8049 else
8050 {
8051 /*
8052 * We're loading a register from memory.
8053 */
8054 switch (pVCpu->iem.s.enmEffOpSize)
8055 {
8056 case IEMMODE_16BIT:
8057 IEM_MC_BEGIN(0, 2);
8058 IEM_MC_LOCAL(uint16_t, u16Value);
8059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8062 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8063 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8064 IEM_MC_ADVANCE_RIP();
8065 IEM_MC_END();
8066 return VINF_SUCCESS;
8067
8068 case IEMMODE_32BIT:
8069 IEM_MC_BEGIN(0, 2);
8070 IEM_MC_LOCAL(uint32_t, u32Value);
8071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8074 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8075 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8076 IEM_MC_ADVANCE_RIP();
8077 IEM_MC_END();
8078 return VINF_SUCCESS;
8079
8080 case IEMMODE_64BIT:
8081 IEM_MC_BEGIN(0, 2);
8082 IEM_MC_LOCAL(uint64_t, u64Value);
8083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8087 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8088 IEM_MC_ADVANCE_RIP();
8089 IEM_MC_END();
8090 return VINF_SUCCESS;
8091
8092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8093 }
8094 }
8095}
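
/*
 * Worked example (an illustrative addition): sign extension with a 32-bit
 * operand size,
 *
 *   movsx eax, byte ptr [mem]   ; [mem] = 0x80 (-128)
 *
 * yields eax = 0xffffff80; bit 7 of the source byte is replicated into
 * bits 31:8 of the destination.
 */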
8096
8097
8098/** Opcode 0x0f 0xbf. */
8099FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8100{
8101 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8102 IEMOP_HLP_MIN_386();
8103
8104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8105
8106 /** @todo Not entirely sure how the operand size prefix is handled here,
8107 * assuming that it will be ignored. Would be nice to have a few
8108 * tests for this. */
8109 /*
8110 * If rm is denoting a register, no more instruction bytes.
8111 */
8112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8113 {
8114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8115 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8116 {
8117 IEM_MC_BEGIN(0, 1);
8118 IEM_MC_LOCAL(uint32_t, u32Value);
8119 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8120 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8121 IEM_MC_ADVANCE_RIP();
8122 IEM_MC_END();
8123 }
8124 else
8125 {
8126 IEM_MC_BEGIN(0, 1);
8127 IEM_MC_LOCAL(uint64_t, u64Value);
8128 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8129 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8130 IEM_MC_ADVANCE_RIP();
8131 IEM_MC_END();
8132 }
8133 }
8134 else
8135 {
8136 /*
8137 * We're loading a register from memory.
8138 */
8139 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8140 {
8141 IEM_MC_BEGIN(0, 2);
8142 IEM_MC_LOCAL(uint32_t, u32Value);
8143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8146 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8147 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8148 IEM_MC_ADVANCE_RIP();
8149 IEM_MC_END();
8150 }
8151 else
8152 {
8153 IEM_MC_BEGIN(0, 2);
8154 IEM_MC_LOCAL(uint64_t, u64Value);
8155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8158 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8159 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8160 IEM_MC_ADVANCE_RIP();
8161 IEM_MC_END();
8162 }
8163 }
8164 return VINF_SUCCESS;
8165}
8166
8167
8168/** Opcode 0x0f 0xc0. */
8169FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8170{
8171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8172 IEMOP_HLP_MIN_486();
8173 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8174
8175 /*
8176 * If rm is denoting a register, no more instruction bytes.
8177 */
8178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8179 {
8180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8181
8182 IEM_MC_BEGIN(3, 0);
8183 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8184 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8186
8187 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8188 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8189 IEM_MC_REF_EFLAGS(pEFlags);
8190 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8191
8192 IEM_MC_ADVANCE_RIP();
8193 IEM_MC_END();
8194 }
8195 else
8196 {
8197 /*
8198 * We're accessing memory.
8199 */
8200 IEM_MC_BEGIN(3, 3);
8201 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8202 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8203 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8204 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8206
8207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8208 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8209 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8210 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8211 IEM_MC_FETCH_EFLAGS(EFlags);
8212 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8213 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8214 else
8215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8216
8217 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8218 IEM_MC_COMMIT_EFLAGS(EFlags);
8219 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8220 IEM_MC_ADVANCE_RIP();
8221 IEM_MC_END();
8222 return VINF_SUCCESS;
8223 }
8224 return VINF_SUCCESS;
8225}
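
/*
 * Illustrative sketch (an addition, not the worker's actual code): the
 * exchange-and-add performed by iemAImpl_xadd_u8 is equivalent to:
 *
 *   uint8_t const u8Sum = *pu8Dst + *pu8Reg;
 *   *pu8Reg = *pu8Dst;          // old destination value -> register
 *   *pu8Dst = u8Sum;            // sum -> destination
 *   // EFLAGS are updated as for the ADD.
 */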
8226
8227
8228/** Opcode 0x0f 0xc1. */
8229FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8230{
8231 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8232 IEMOP_HLP_MIN_486();
8233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8234
8235 /*
8236 * If rm is denoting a register, no more instruction bytes.
8237 */
8238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8239 {
8240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8241
8242 switch (pVCpu->iem.s.enmEffOpSize)
8243 {
8244 case IEMMODE_16BIT:
8245 IEM_MC_BEGIN(3, 0);
8246 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8247 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8249
8250 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8251 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8252 IEM_MC_REF_EFLAGS(pEFlags);
8253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8254
8255 IEM_MC_ADVANCE_RIP();
8256 IEM_MC_END();
8257 return VINF_SUCCESS;
8258
8259 case IEMMODE_32BIT:
8260 IEM_MC_BEGIN(3, 0);
8261 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8262 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8263 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8264
8265 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8266 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8267 IEM_MC_REF_EFLAGS(pEFlags);
8268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8269
8270 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8271 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8272 IEM_MC_ADVANCE_RIP();
8273 IEM_MC_END();
8274 return VINF_SUCCESS;
8275
8276 case IEMMODE_64BIT:
8277 IEM_MC_BEGIN(3, 0);
8278 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8279 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8280 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8281
8282 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8283 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8284 IEM_MC_REF_EFLAGS(pEFlags);
8285 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8286
8287 IEM_MC_ADVANCE_RIP();
8288 IEM_MC_END();
8289 return VINF_SUCCESS;
8290
8291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8292 }
8293 }
8294 else
8295 {
8296 /*
8297 * We're accessing memory.
8298 */
8299 switch (pVCpu->iem.s.enmEffOpSize)
8300 {
8301 case IEMMODE_16BIT:
8302 IEM_MC_BEGIN(3, 3);
8303 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8304 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8305 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8306 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8308
8309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8310 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8311 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8312 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8313 IEM_MC_FETCH_EFLAGS(EFlags);
8314 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8315 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8316 else
8317 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8318
8319 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8320 IEM_MC_COMMIT_EFLAGS(EFlags);
8321 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8322 IEM_MC_ADVANCE_RIP();
8323 IEM_MC_END();
8324 return VINF_SUCCESS;
8325
8326 case IEMMODE_32BIT:
8327 IEM_MC_BEGIN(3, 3);
8328 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8329 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8330 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8331 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8333
8334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8335 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8336 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8337 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8338 IEM_MC_FETCH_EFLAGS(EFlags);
8339 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8340 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8341 else
8342 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8343
8344 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8345 IEM_MC_COMMIT_EFLAGS(EFlags);
8346 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8347 IEM_MC_ADVANCE_RIP();
8348 IEM_MC_END();
8349 return VINF_SUCCESS;
8350
8351 case IEMMODE_64BIT:
8352 IEM_MC_BEGIN(3, 3);
8353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8354 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8355 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8356 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8358
8359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8360 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8361 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8362 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8363 IEM_MC_FETCH_EFLAGS(EFlags);
8364 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8365 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8366 else
8367 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8368
8369 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8370 IEM_MC_COMMIT_EFLAGS(EFlags);
8371 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8372 IEM_MC_ADVANCE_RIP();
8373 IEM_MC_END();
8374 return VINF_SUCCESS;
8375
8376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8377 }
8378 }
8379}
8380
8381
8382/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8383FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8384/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8385FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8386/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8387FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8388/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8389FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8390
8391
8392/** Opcode 0x0f 0xc3. */
8393FNIEMOP_DEF(iemOp_movnti_My_Gy)
8394{
8395 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8396
8397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8398
8399 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8400 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8401 {
8402 switch (pVCpu->iem.s.enmEffOpSize)
8403 {
8404 case IEMMODE_32BIT:
8405 IEM_MC_BEGIN(0, 2);
8406 IEM_MC_LOCAL(uint32_t, u32Value);
8407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8408
8409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8411 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8412 return IEMOP_RAISE_INVALID_OPCODE();
8413
8414 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8415 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8416 IEM_MC_ADVANCE_RIP();
8417 IEM_MC_END();
8418 break;
8419
8420 case IEMMODE_64BIT:
8421 IEM_MC_BEGIN(0, 2);
8422 IEM_MC_LOCAL(uint64_t, u64Value);
8423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8424
8425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8428 return IEMOP_RAISE_INVALID_OPCODE();
8429
8430 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8431 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8432 IEM_MC_ADVANCE_RIP();
8433 IEM_MC_END();
8434 break;
8435
8436 case IEMMODE_16BIT:
8437 /** @todo check this form. */
8438 return IEMOP_RAISE_INVALID_OPCODE();
8439 }
8440 }
8441 else
8442 return IEMOP_RAISE_INVALID_OPCODE();
8443 return VINF_SUCCESS;
8444}
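
/*
 * Note (an addition): the non-temporal store hint of movnti is not
 * modelled under emulation; the code above performs a plain
 * IEM_MC_STORE_MEM_U32/U64, the only extra work being the SSE2 feature
 * check that yields #UD on older CPU profiles.
 */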
8445/* Opcode 0x66 0x0f 0xc3 - invalid */
8446/* Opcode 0xf3 0x0f 0xc3 - invalid */
8447/* Opcode 0xf2 0x0f 0xc3 - invalid */
8448
8449/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8450FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8451/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8452FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8453/* Opcode 0xf3 0x0f 0xc4 - invalid */
8454/* Opcode 0xf2 0x0f 0xc4 - invalid */
8455
8456/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8457FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8458/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8459FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8460/* Opcode 0xf3 0x0f 0xc5 - invalid */
8461/* Opcode 0xf2 0x0f 0xc5 - invalid */
8462
8463/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8464FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8465/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8466FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8467/* Opcode 0xf3 0x0f 0xc6 - invalid */
8468/* Opcode 0xf2 0x0f 0xc6 - invalid */
8469
8470
8471/** Opcode 0x0f 0xc7 !11/1. */
8472FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8473{
8474 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8475
8476 IEM_MC_BEGIN(4, 3);
8477 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8478 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8479 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8480 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8481 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8482 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8484
8485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8486 IEMOP_HLP_DONE_DECODING();
8487 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8488
8489 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8490 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8491 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8492
8493 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8494 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8495 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8496
8497 IEM_MC_FETCH_EFLAGS(EFlags);
8498 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8499 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8500 else
8501 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8502
8503 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8504 IEM_MC_COMMIT_EFLAGS(EFlags);
8505 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8506 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8507 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8508 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8509 IEM_MC_ENDIF();
8510 IEM_MC_ADVANCE_RIP();
8511
8512 IEM_MC_END();
8513 return VINF_SUCCESS;
8514}
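
/*
 * Illustrative sketch (an addition, not the worker's actual code): the
 * cmpxchg8b operation on the mapped memory operand is equivalent to:
 *
 *   if (*pu64MemDst == u64EaxEdx.u) {
 *       *pu64MemDst = u64EbxEcx.u;     // EDX:EAX matched: store ECX:EBX, ZF=1
 *   } else {
 *       u64EaxEdx.u = *pu64MemDst;     // no match: load into EDX:EAX, ZF=0
 *   }
 */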
8515
8516
8517/** Opcode REX.W 0x0f 0xc7 !11/1. */
8518FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8519{
8520 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8521 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8522 {
8523#if 0
8524 RT_NOREF(bRm);
8525 IEMOP_BITCH_ABOUT_STUB();
8526 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8527#else
8528 IEM_MC_BEGIN(4, 3);
8529 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8530 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8531 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8532 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8533 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8534 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8536
8537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8538 IEMOP_HLP_DONE_DECODING();
8539 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8540 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8541
8542 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8543 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8544 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8545
8546 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8547 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8548 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8549
8550 IEM_MC_FETCH_EFLAGS(EFlags);
8551# ifdef RT_ARCH_AMD64
8552 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8553 {
8554 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8555 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8556 else
8557 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8558 }
8559 else
8560# endif
8561 {
8562 /* Note! The fallback for 32-bit systems and systems without CX16 uses
8563 multiple accesses and is not at all atomic, which works fine in a
8564 uni-CPU guest configuration (ignoring DMA). If guest SMP is active we
8565 have no choice but to use a rendezvous callback here. Sigh. */
8566 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8567 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8568 else
8569 {
8570 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8571 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8572 }
8573 }
8574
8575 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8576 IEM_MC_COMMIT_EFLAGS(EFlags);
8577 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8578 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8579 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8580 IEM_MC_ENDIF();
8581 IEM_MC_ADVANCE_RIP();
8582
8583 IEM_MC_END();
8584 return VINF_SUCCESS;
8585#endif
8586 }
8587 Log(("cmpxchg16b -> #UD\n"));
8588 return IEMOP_RAISE_INVALID_OPCODE();
8589}
8590
8591FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8592{
8593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8594 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8595 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8596}
8597
8598/** Opcode 0x0f 0xc7 11/6. */
8599FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8600
8601/** Opcode 0x0f 0xc7 !11/6. */
8602#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8603FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8604{
8605 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8606 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
8607 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
8608 IEM_MC_BEGIN(2, 0);
8609 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8610 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8612 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8613 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8614 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8615 IEM_MC_END();
8616 return VINF_SUCCESS;
8617}
8618#else
8619FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8620#endif
8621
8622/** Opcode 0x66 0x0f 0xc7 !11/6. */
8623#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8624FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8625{
8626 IEMOP_MNEMONIC(vmclear, "vmclear");
8627 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
8628 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
8629 IEM_MC_BEGIN(2, 0);
8630 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8633 IEMOP_HLP_DONE_DECODING();
8634 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8635 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8636 IEM_MC_END();
8637 return VINF_SUCCESS;
8638}
8639#else
8640FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8641#endif
8642
8643/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8644#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8645FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8646{
8647 IEMOP_MNEMONIC(vmxon, "vmxon");
8648 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
8649 IEM_MC_BEGIN(2, 0);
8650 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8651 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8653 IEMOP_HLP_DONE_DECODING();
8654 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8655 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8656 IEM_MC_END();
8657 return VINF_SUCCESS;
8658}
8659#else
8660FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8661#endif
8662
8663/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8664#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8665FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8666{
8667 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8668 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
8669 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
8670 IEM_MC_BEGIN(2, 0);
8671 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8672 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8674 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8675 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8676 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8677 IEM_MC_END();
8678 return VINF_SUCCESS;
8679}
8680#else
8681FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8682#endif
8683
8684/** Opcode 0x0f 0xc7 11/7. */
8685FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8686
8687
8688/**
8689 * Group 9 jump table for register variant.
8690 */
8691IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8692{ /* pfx: none, 066h, 0f3h, 0f2h */
8693 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8694 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8695 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8696 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8697 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8698 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8699 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8700 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8701};
8702AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8703
8704
8705/**
8706 * Group 9 jump table for memory variant.
8707 */
8708IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8709{ /* pfx: none, 066h, 0f3h, 0f2h */
8710 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8711 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8712 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8713 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8714 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8715 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8716 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8717 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8718};
8719AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8720
8721
8722/** Opcode 0x0f 0xc7. */
8723FNIEMOP_DEF(iemOp_Grp9)
8724{
8725 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8727 /* register, register */
8728 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8729 + pVCpu->iem.s.idxPrefix], bRm);
8730 /* memory, register */
8731 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8732 + pVCpu->iem.s.idxPrefix], bRm);
8733}
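
/* For illustration only: the dispatch above folds ModRM.reg and the active
 * SIMD prefix into a single index, row (reg) * 4 + column (prefix). Below is
 * a minimal standalone sketch of the same scheme; every name in it is made
 * up for the sketch, and it is guarded out of the build on purpose. */
#if 0
#include <stdint.h>
#include <stdio.h>

typedef void (*PFNSKETCH)(uint8_t bRm);

static void sketchInvalid(uint8_t bRm) { (void)bRm; printf("#UD\n"); }
static void sketchCmpXchg(uint8_t bRm) { (void)bRm; printf("cmpxchg8b/16b\n"); }

#define SKETCH_X4(a) a, a, a, a /* same trick as IEMOP_X4: one handler for all four prefix columns */
static const PFNSKETCH g_apfnSketchGrp9Mem[8 * 4] =
{   /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ SKETCH_X4(sketchInvalid),
    /* /1 */ SKETCH_X4(sketchCmpXchg),
    /* /2 */ SKETCH_X4(sketchInvalid),
    /* /3 */ SKETCH_X4(sketchInvalid),
    /* /4 */ SKETCH_X4(sketchInvalid),
    /* /5 */ SKETCH_X4(sketchInvalid),
    /* /6 */ SKETCH_X4(sketchInvalid),
    /* /7 */ SKETCH_X4(sketchInvalid),
};

static void sketchDispatchGrp9(uint8_t bRm, unsigned idxPrefix /* 0=none, 1=66h, 2=F3h, 3=F2h */)
{
    unsigned const iReg = (bRm >> 3) & 7;           /* ModRM.reg picks the /n row */
    g_apfnSketchGrp9Mem[iReg * 4 + idxPrefix](bRm); /* row * 4 + prefix column */
}
#endif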
8734
8735
8736/**
8737 * Common 'bswap register' helper.
8738 */
8739FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8740{
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8742 switch (pVCpu->iem.s.enmEffOpSize)
8743 {
8744 case IEMMODE_16BIT:
8745 IEM_MC_BEGIN(1, 0);
8746 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8747 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8748 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8749 IEM_MC_ADVANCE_RIP();
8750 IEM_MC_END();
8751 return VINF_SUCCESS;
8752
8753 case IEMMODE_32BIT:
8754 IEM_MC_BEGIN(1, 0);
8755 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8756 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8757 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8758 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8759 IEM_MC_ADVANCE_RIP();
8760 IEM_MC_END();
8761 return VINF_SUCCESS;
8762
8763 case IEMMODE_64BIT:
8764 IEM_MC_BEGIN(1, 0);
8765 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8766 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8767 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8768 IEM_MC_ADVANCE_RIP();
8769 IEM_MC_END();
8770 return VINF_SUCCESS;
8771
8772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8773 }
8774}
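
/* For illustration only: a minimal sketch of the byte-swap semantics the
 * helper above routes to iemAImpl_bswap_u16/u32/u64. The 16-bit behaviour
 * shown (zeroing the low word) is an assumption about one plausible CPU
 * behaviour, since BSWAP with a 16-bit operand is architecturally undefined;
 * it is not taken from this file. Guarded out of the build. */
#if 0
#include <stdint.h>

static uint32_t sketchBswapU32(uint32_t u) /* bswap r32 */
{
    return (u << 24)
         | ((u << 8) & UINT32_C(0x00ff0000))
         | ((u >> 8) & UINT32_C(0x0000ff00))
         | (u >> 24);
}

static uint64_t sketchBswapU64(uint64_t u) /* bswap r64 */
{
    return ((uint64_t)sketchBswapU32((uint32_t)u) << 32)
         |  sketchBswapU32((uint32_t)(u >> 32));
}

/* Note how a 32-bit reference suffices for the 16-bit form too: the helper
 * above deliberately passes a uint32_t pointer so the high dword of a 64-bit
 * register stays untouched. */
static void sketchBswapU16(uint32_t *pu32)
{
    *pu32 &= UINT32_C(0xffff0000); /* assumed behaviour: low word becomes zero */
}
#endif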
8775
8776
8777/** Opcode 0x0f 0xc8. */
8778FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8779{
8780 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8781 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8782 prefix, but REX.B appears to be the correct one. For a parallel
8783 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8784 IEMOP_HLP_MIN_486();
8785 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8786}
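
/* For illustration only: the decoder keeps REX.B pre-shifted (0 or 8), so
 * composing the final register index in the call above is a plain OR. Sketch
 * with made-up names, guarded out of the build: */
#if 0
static unsigned sketchBswapRegIndex(unsigned iBaseReg /* 0..7 */, unsigned uRexB /* 0 or 8 */)
{
    return iBaseReg | uRexB; /* xAX(0) with REX.B -> r8(8), xCX(1) -> r9(9), ... */
}
#endif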
8787
8788
8789/** Opcode 0x0f 0xc9. */
8790FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8791{
8792 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8793 IEMOP_HLP_MIN_486();
8794 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8795}
8796
8797
8798/** Opcode 0x0f 0xca. */
8799FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8800{
8801 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8802 IEMOP_HLP_MIN_486();
8803 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8804}
8805
8806
8807/** Opcode 0x0f 0xcb. */
8808FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8809{
8810 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8811 IEMOP_HLP_MIN_486();
8812 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8813}
8814
8815
8816/** Opcode 0x0f 0xcc. */
8817FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8818{
8819 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8820 IEMOP_HLP_MIN_486();
8821 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8822}
8823
8824
8825/** Opcode 0x0f 0xcd. */
8826FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8827{
8828 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8829 IEMOP_HLP_MIN_486();
8830 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8831}
8832
8833
8834/** Opcode 0x0f 0xce. */
8835FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8836{
8837 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8838 IEMOP_HLP_MIN_486();
8839 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8840}
8841
8842
8843/** Opcode 0x0f 0xcf. */
8844FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8845{
8846 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8847 IEMOP_HLP_MIN_486();
8848 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8849}
8850
8851
8852/* Opcode 0x0f 0xd0 - invalid */
8853/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8854FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8855/* Opcode 0xf3 0x0f 0xd0 - invalid */
8856/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8857FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8858
8859/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8860FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8861/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
8862FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8863/* Opcode 0xf3 0x0f 0xd1 - invalid */
8864/* Opcode 0xf2 0x0f 0xd1 - invalid */
8865
8866/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8867FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8868/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8869FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8870/* Opcode 0xf3 0x0f 0xd2 - invalid */
8871/* Opcode 0xf2 0x0f 0xd2 - invalid */
8872
8873/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8874FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8875/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8876FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8877/* Opcode 0xf3 0x0f 0xd3 - invalid */
8878/* Opcode 0xf2 0x0f 0xd3 - invalid */
8879
8880/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8881FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8882/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
8883FNIEMOP_STUB(iemOp_paddq_Vx_W);
8884/* Opcode 0xf3 0x0f 0xd4 - invalid */
8885/* Opcode 0xf2 0x0f 0xd4 - invalid */
8886
8887/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8888FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8889/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8890FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8891/* Opcode 0xf3 0x0f 0xd5 - invalid */
8892/* Opcode 0xf2 0x0f 0xd5 - invalid */
8893
8894/* Opcode 0x0f 0xd6 - invalid */
8895
8896/**
8897 * @opcode 0xd6
8898 * @oppfx 0x66
8899 * @opcpuid sse2
8900 * @opgroup og_sse2_pcksclr_datamove
8901 * @opxcpttype none
8902 * @optest op1=-1 op2=2 -> op1=2
8903 * @optest op1=0 op2=-42 -> op1=-42
8904 */
8905FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8906{
8907 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8910 {
8911 /*
8912 * Register, register.
8913 */
8914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8915 IEM_MC_BEGIN(0, 2);
8916 IEM_MC_LOCAL(uint64_t, uSrc);
8917
8918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8919 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8920
8921 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8922 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8923
8924 IEM_MC_ADVANCE_RIP();
8925 IEM_MC_END();
8926 }
8927 else
8928 {
8929 /*
8930 * Memory, register.
8931 */
8932 IEM_MC_BEGIN(0, 2);
8933 IEM_MC_LOCAL(uint64_t, uSrc);
8934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8935
8936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8938 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8939 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8940
8941 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8942 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8943
8944 IEM_MC_ADVANCE_RIP();
8945 IEM_MC_END();
8946 }
8947 return VINF_SUCCESS;
8948}
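
/* For illustration only: stripped of the IEM_MC plumbing, the two paths above
 * amount to the following (illustrative types, not IEM code): the register
 * form zero-extends into the full 128-bit destination, the memory form stores
 * only the low quadword. Guarded out of the build. */
#if 0
#include <stdint.h>
#include <string.h>

typedef struct { uint64_t au64[2]; } SKETCHXREG;

static void sketchMovqRegReg(SKETCHXREG *pDst, SKETCHXREG const *pSrc)
{
    pDst->au64[0] = pSrc->au64[0]; /* low qword copied...            */
    pDst->au64[1] = 0;             /* ...high qword zeroed (ZX_U128) */
}

static void sketchMovqMemReg(void *pvDst, SKETCHXREG const *pSrc)
{
    memcpy(pvDst, &pSrc->au64[0], sizeof(uint64_t)); /* low qword only */
}
#endif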
8949
8950
8951/**
8952 * @opcode 0xd6
8953 * @opcodesub 11 mr/reg
8954 * @oppfx f3
8955 * @opcpuid sse2
8956 * @opgroup og_sse2_simdint_datamove
8957 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8958 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8959 */
8960FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8961{
8962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8964 {
8965 /*
8966 * Register, register.
8967 */
8968 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8970 IEM_MC_BEGIN(0, 1);
8971 IEM_MC_LOCAL(uint64_t, uSrc);
8972
8973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8974 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8975
8976 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8977 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8978 IEM_MC_FPU_TO_MMX_MODE();
8979
8980 IEM_MC_ADVANCE_RIP();
8981 IEM_MC_END();
8982 return VINF_SUCCESS;
8983 }
8984
8985 /**
8986 * @opdone
8987 * @opmnemonic udf30fd6mem
8988 * @opcode 0xd6
8989 * @opcodesub !11 mr/reg
8990 * @oppfx f3
8991 * @opunused intel-modrm
8992 * @opcpuid sse
8993 * @optest ->
8994 */
8995 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8996}
8997
8998
8999/**
9000 * @opcode 0xd6
9001 * @opcodesub 11 mr/reg
9002 * @oppfx f2
9003 * @opcpuid sse2
9004 * @opgroup og_sse2_simdint_datamove
9005 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9006 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9007 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9008 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9009 * @optest op1=-42 op2=0xfedcba9876543210
9010 * -> op1=0xfedcba9876543210 ftw=0xff
9011 */
9012FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9013{
9014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9016 {
9017 /*
9018 * Register, register.
9019 */
9020 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9022 IEM_MC_BEGIN(0, 1);
9023 IEM_MC_LOCAL(uint64_t, uSrc);
9024
9025 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9026 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9027
9028 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9029 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9030 IEM_MC_FPU_TO_MMX_MODE();
9031
9032 IEM_MC_ADVANCE_RIP();
9033 IEM_MC_END();
9034 return VINF_SUCCESS;
9035 }
9036
9037 /**
9038 * @opdone
9039 * @opmnemonic udf20fd6mem
9040 * @opcode 0xd6
9041 * @opcodesub !11 mr/reg
9042 * @oppfx f2
9043 * @opunused intel-modrm
9044 * @opcpuid sse
9045 * @optest ->
9046 */
9047 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9048}
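
/* For illustration only: the two register-only forms above are simple 64-bit
 * moves between the MMX and XMM files, sketched below with illustrative
 * types. Both also force the FPU into MMX mode (tag word all valid, hence the
 * ftw=0xff in the test specs), which is what IEM_MC_FPU_TO_MMX_MODE models. */
#if 0
#include <stdint.h>

typedef struct { uint64_t au64[2]; } SKETCHXREG;

static void sketchMovq2dq(SKETCHXREG *pxDst, uint64_t const *puMmSrc)
{
    pxDst->au64[0] = *puMmSrc; /* MMX qword into the low half... */
    pxDst->au64[1] = 0;        /* ...high half zeroed            */
}

static void sketchMovdq2q(uint64_t *puMmDst, SKETCHXREG const *pxSrc)
{
    *puMmDst = pxSrc->au64[0]; /* low XMM qword into the MMX register */
}
#endif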
9049
9050/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9051FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9052{
9053 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9054 /** @todo testcase: Check that the instruction implicitly clears the high
9055 * bits in 64-bit mode. The REX.W prefix only becomes necessary when
9056 * VLMAX > 256 and opcode modifications are made to work with the whole
9057 * width (not just 128 bits). */
9058 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
9059 /* Docs say register only. */
9060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9062 {
9063 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9064 IEM_MC_BEGIN(2, 0);
9065 IEM_MC_ARG(uint64_t *, pDst, 0);
9066 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9067 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9068 IEM_MC_PREPARE_FPU_USAGE();
9069 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9070 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9071 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9072 IEM_MC_ADVANCE_RIP();
9073 IEM_MC_END();
9074 return VINF_SUCCESS;
9075 }
9076 return IEMOP_RAISE_INVALID_OPCODE();
9077}
9078
9079/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9080FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9081{
9082 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9083 /** @todo testcase: Check that the instruction implicitly clears the high
9084 * bits in 64-bit mode. The REX.W prefix only becomes necessary when
9085 * VLMAX > 256 and opcode modifications are made to work with the whole
9086 * width (not just 128 bits). */
9087 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
9088 /* Docs say register only. */
9089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9091 {
9092 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9093 IEM_MC_BEGIN(2, 0);
9094 IEM_MC_ARG(uint64_t *, pDst, 0);
9095 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9097 IEM_MC_PREPARE_SSE_USAGE();
9098 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9099 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9100 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9101 IEM_MC_ADVANCE_RIP();
9102 IEM_MC_END();
9103 return VINF_SUCCESS;
9104 }
9105 return IEMOP_RAISE_INVALID_OPCODE();
9106}
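
/* For illustration only: both workers above implement the same idea, sketched
 * here with made-up names (not the iemAImpl_* code): gather the most
 * significant bit of every packed byte into the low bits of the destination
 * GPR (8 bits for the MMX form, 16 for the SSE form) and zero the rest. */
#if 0
#include <stdint.h>

static uint64_t sketchPmovmskbU64(uint64_t uSrc) /* MMX: 8 bytes -> 8 mask bits */
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}

static uint64_t sketchPmovmskbU128(uint64_t const auSrc[2]) /* SSE: 16 bytes -> 16 bits */
{
    return sketchPmovmskbU64(auSrc[0])
         | (sketchPmovmskbU64(auSrc[1]) << 8);
}
#endif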
9107
9108/* Opcode 0xf3 0x0f 0xd7 - invalid */
9109/* Opcode 0xf2 0x0f 0xd7 - invalid */
9110
9111
9112/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9113FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9114/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
9115FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9116/* Opcode 0xf3 0x0f 0xd8 - invalid */
9117/* Opcode 0xf2 0x0f 0xd8 - invalid */
9118
9119/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9120FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9121/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9122FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9123/* Opcode 0xf3 0x0f 0xd9 - invalid */
9124/* Opcode 0xf2 0x0f 0xd9 - invalid */
9125
9126/** Opcode 0x0f 0xda - pminub Pq, Qq */
9127FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9128/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9129FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9130/* Opcode 0xf3 0x0f 0xda - invalid */
9131/* Opcode 0xf2 0x0f 0xda - invalid */
9132
9133/** Opcode 0x0f 0xdb - pand Pq, Qq */
9134FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9135/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
9136FNIEMOP_STUB(iemOp_pand_Vx_W);
9137/* Opcode 0xf3 0x0f 0xdb - invalid */
9138/* Opcode 0xf2 0x0f 0xdb - invalid */
9139
9140/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9141FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9142/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9143FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9144/* Opcode 0xf3 0x0f 0xdc - invalid */
9145/* Opcode 0xf2 0x0f 0xdc - invalid */
9146
9147/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9148FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9149/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9150FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9151/* Opcode 0xf3 0x0f 0xdd - invalid */
9152/* Opcode 0xf2 0x0f 0xdd - invalid */
9153
9154/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9155FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9156/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
9157FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9158/* Opcode 0xf3 0x0f 0xde - invalid */
9159/* Opcode 0xf2 0x0f 0xde - invalid */
9160
9161/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9162FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9163/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9164FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9165/* Opcode 0xf3 0x0f 0xdf - invalid */
9166/* Opcode 0xf2 0x0f 0xdf - invalid */
9167
9168/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9169FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9170/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9171FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9172/* Opcode 0xf3 0x0f 0xe0 - invalid */
9173/* Opcode 0xf2 0x0f 0xe0 - invalid */
9174
9175/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9176FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9177/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
9178FNIEMOP_STUB(iemOp_psraw_Vx_W);
9179/* Opcode 0xf3 0x0f 0xe1 - invalid */
9180/* Opcode 0xf2 0x0f 0xe1 - invalid */
9181
9182/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9183FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9184/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9185FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9186/* Opcode 0xf3 0x0f 0xe2 - invalid */
9187/* Opcode 0xf2 0x0f 0xe2 - invalid */
9188
9189/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9190FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9191/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9192FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9193/* Opcode 0xf3 0x0f 0xe3 - invalid */
9194/* Opcode 0xf2 0x0f 0xe3 - invalid */
9195
9196/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9197FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9198/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
9199FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9200/* Opcode 0xf3 0x0f 0xe4 - invalid */
9201/* Opcode 0xf2 0x0f 0xe4 - invalid */
9202
9203/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9204FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9205/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9206FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9207/* Opcode 0xf3 0x0f 0xe5 - invalid */
9208/* Opcode 0xf2 0x0f 0xe5 - invalid */
9209
9210/* Opcode 0x0f 0xe6 - invalid */
9211/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9212FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9213/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9214FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9215/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9216FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9217
9218
9219/**
9220 * @opcode 0xe7
9221 * @opcodesub !11 mr/reg
9222 * @oppfx none
9223 * @opcpuid sse
9224 * @opgroup og_sse1_cachect
9225 * @opxcpttype none
9226 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9227 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9228 */
9229FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9230{
9231 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9233 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9234 {
9235 /* Register, memory. */
9236 IEM_MC_BEGIN(0, 2);
9237 IEM_MC_LOCAL(uint64_t, uSrc);
9238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9239
9240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9242 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9243 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9244
9245 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9246 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9247 IEM_MC_FPU_TO_MMX_MODE();
9248
9249 IEM_MC_ADVANCE_RIP();
9250 IEM_MC_END();
9251 return VINF_SUCCESS;
9252 }
9253 /**
9254 * @opdone
9255 * @opmnemonic ud0fe7reg
9256 * @opcode 0xe7
9257 * @opcodesub 11 mr/reg
9258 * @oppfx none
9259 * @opunused immediate
9260 * @opcpuid sse
9261 * @optest ->
9262 */
9263 return IEMOP_RAISE_INVALID_OPCODE();
9264}
9265
9266/**
9267 * @opcode 0xe7
9268 * @opcodesub !11 mr/reg
9269 * @oppfx 0x66
9270 * @opcpuid sse2
9271 * @opgroup og_sse2_cachect
9272 * @opxcpttype 1
9273 * @optest op1=-1 op2=2 -> op1=2
9274 * @optest op1=0 op2=-42 -> op1=-42
9275 */
9276FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9277{
9278 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9280 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9281 {
9282 /* Register, memory. */
9283 IEM_MC_BEGIN(0, 2);
9284 IEM_MC_LOCAL(RTUINT128U, uSrc);
9285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9286
9287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9289 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9290 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9291
9292 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9293 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9294
9295 IEM_MC_ADVANCE_RIP();
9296 IEM_MC_END();
9297 return VINF_SUCCESS;
9298 }
9299
9300 /**
9301 * @opdone
9302 * @opmnemonic ud660fe7reg
9303 * @opcode 0xe7
9304 * @opcodesub 11 mr/reg
9305 * @oppfx 0x66
9306 * @opunused immediate
9307 * @opcpuid sse
9308 * @optest ->
9309 */
9310 return IEMOP_RAISE_INVALID_OPCODE();
9311}
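
/* For illustration only: guest code usually reaches the two instructions
 * above through the streaming-store intrinsics (_mm_stream_pi for movntq,
 * _mm_stream_si128 for movntdq). A small host-side usage sketch, guarded out
 * of the build; the destination must be 16-byte aligned, matching the
 * ALIGN_SSE store above. */
#if 0
#include <emmintrin.h> /* SSE2 */

static void sketchStreamStore(__m128i *pDst /* 16-byte aligned */, __m128i uValue)
{
    _mm_stream_si128(pDst, uValue); /* movntdq: store bypassing the caches   */
    _mm_sfence();                   /* order the write-combined store before
                                       subsequent stores become visible      */
}
#endif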
9312
9313/* Opcode 0xf3 0x0f 0xe7 - invalid */
9314/* Opcode 0xf2 0x0f 0xe7 - invalid */
9315
9316
9317/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9318FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9319/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
9320FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9321/* Opcode 0xf3 0x0f 0xe8 - invalid */
9322/* Opcode 0xf2 0x0f 0xe8 - invalid */
9323
9324/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9325FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9326/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9327FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9328/* Opcode 0xf3 0x0f 0xe9 - invalid */
9329/* Opcode 0xf2 0x0f 0xe9 - invalid */
9330
9331/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9332FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9333/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9334FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9335/* Opcode 0xf3 0x0f 0xea - invalid */
9336/* Opcode 0xf2 0x0f 0xea - invalid */
9337
9338/** Opcode 0x0f 0xeb - por Pq, Qq */
9339FNIEMOP_STUB(iemOp_por_Pq_Qq);
9340/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
9341FNIEMOP_STUB(iemOp_por_Vx_W);
9342/* Opcode 0xf3 0x0f 0xeb - invalid */
9343/* Opcode 0xf2 0x0f 0xeb - invalid */
9344
9345/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9346FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9347/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9348FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9349/* Opcode 0xf3 0x0f 0xec - invalid */
9350/* Opcode 0xf2 0x0f 0xec - invalid */
9351
9352/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9353FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9354/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9355FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9356/* Opcode 0xf3 0x0f 0xed - invalid */
9357/* Opcode 0xf2 0x0f 0xed - invalid */
9358
9359/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9360FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9361/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
9362FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9363/* Opcode 0xf3 0x0f 0xee - invalid */
9364/* Opcode 0xf2 0x0f 0xee - invalid */
9365
9366
9367/** Opcode 0x0f 0xef - pxor Pq, Qq */
9368FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9369{
9370 IEMOP_MNEMONIC(pxor, "pxor");
9371 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9372}
9373
9374/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9375FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9376{
9377 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9378 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9379}
9380
9381/* Opcode 0xf3 0x0f 0xef - invalid */
9382/* Opcode 0xf2 0x0f 0xef - invalid */
9383
9384/* Opcode 0x0f 0xf0 - invalid */
9385/* Opcode 0x66 0x0f 0xf0 - invalid */
9386/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9387FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9388
9389/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9390FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9391/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
9392FNIEMOP_STUB(iemOp_psllw_Vx_W);
9393/* Opcode 0xf2 0x0f 0xf1 - invalid */
9394
9395/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9396FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9397/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9398FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9399/* Opcode 0xf2 0x0f 0xf2 - invalid */
9400
9401/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9402FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9403/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9404FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9405/* Opcode 0xf2 0x0f 0xf3 - invalid */
9406
9407/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9408FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9409/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
9410FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9411/* Opcode 0xf2 0x0f 0xf4 - invalid */
9412
9413/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9414FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9415/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9416FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9417/* Opcode 0xf2 0x0f 0xf5 - invalid */
9418
9419/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9420FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9421/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9422FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9423/* Opcode 0xf2 0x0f 0xf6 - invalid */
9424
9425/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9426FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9427/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9428FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9429/* Opcode 0xf2 0x0f 0xf7 - invalid */
9430
9431/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9432FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9433/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
9434FNIEMOP_STUB(iemOp_psubb_Vx_W);
9435/* Opcode 0xf2 0x0f 0xf8 - invalid */
9436
9437/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9438FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9439/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9440FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9441/* Opcode 0xf2 0x0f 0xf9 - invalid */
9442
9443/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9444FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9445/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9446FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9447/* Opcode 0xf2 0x0f 0xfa - invalid */
9448
9449/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9450FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9451/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
9452FNIEMOP_STUB(iemOp_psubq_Vx_W);
9453/* Opcode 0xf2 0x0f 0xfb - invalid */
9454
9455/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9456FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9457/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9458FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9459/* Opcode 0xf2 0x0f 0xfc - invalid */
9460
9461/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9462FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9463/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9464FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9465/* Opcode 0xf2 0x0f 0xfd - invalid */
9466
9467/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9468FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9469/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
9470FNIEMOP_STUB(iemOp_paddd_Vx_W);
9471/* Opcode 0xf2 0x0f 0xfe - invalid */
9472
9473
9474/** Opcode 0x0f 0xff - UD0 */
9475FNIEMOP_DEF(iemOp_ud0)
9476{
9477 IEMOP_MNEMONIC(ud0, "ud0");
9478 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9479 {
9480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9481#ifndef TST_IEM_CHECK_MC
9482 RTGCPTR GCPtrEff;
9483 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9484 if (rcStrict != VINF_SUCCESS)
9485 return rcStrict;
9486#endif
9487 IEMOP_HLP_DONE_DECODING();
9488 }
9489 return IEMOP_RAISE_INVALID_OPCODE();
9490}
9491
9492
9493
9494/**
9495 * Two byte opcode map, first byte 0x0f.
9496 *
9497 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9498 * check if it needs updating as well when making changes.
9499 */
9500IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9501{
9502 /* no prefix, 066h prefix f3h prefix, f2h prefix */
9503 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9504 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9505 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9506 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9507 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9508 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9509 /* 0x06 */ IEMOP_X4(iemOp_clts),
9510 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9511 /* 0x08 */ IEMOP_X4(iemOp_invd),
9512 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9513 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9514 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9515 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9516 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9517 /* 0x0e */ IEMOP_X4(iemOp_femms),
9518 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9519
9520 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9521 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9522 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9523 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9524 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9525 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9526 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9527 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9528 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9529 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9530 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9531 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9532 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9533 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9534 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9535 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9536
9537 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9538 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9539 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9540 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9541 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9542 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9543 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9544 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9545 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9546 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9547 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9548 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9549 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9550 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9551 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9552 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9553
9554 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9555 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9556 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9557 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9558 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9559 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9560 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9561 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9562 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9563 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9564 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9565 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9566 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9567 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9568 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9569 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9570
9571 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9572 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9573 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9574 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9575 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9576 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9577 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9578 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9579 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9580 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9581 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9582 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9583 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9584 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9585 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9586 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9587
9588 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9589 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9590 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9591 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9592 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9593 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9594 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9595 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9596 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9597 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9598 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9599 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9600 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9601 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9602 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9603 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9604
9605 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9606 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9607 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9608 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9609 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9610 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9611 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9612 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9613 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9614 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9615 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9619 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9620 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9621
9622 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9623 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9624 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9625 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9626 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9627 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9628 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9629 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9630
9631 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9632 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9633 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9634 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9635 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9636 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9637 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9638 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9639
9640 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9641 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9642 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9643 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9644 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9645 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9646 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9647 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9648 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9649 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9650 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9651 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9652 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9653 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9654 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9655 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9656
9657 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9658 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9659 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9660 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9661 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9662 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9663 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9664 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9665 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9666 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9667 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9668 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9669 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9670 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9671 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9672 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9673
9674 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9675 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9676 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9677 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9678 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9679 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9680 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9681 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9682 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9683 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9684 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9685 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9686 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9687 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9688 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9689 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9690
9691 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9692 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9693 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9694 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9695 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9696 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9697 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9698 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9699 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9700 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9701 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9702 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9703 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9704 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9705 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9706 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9707
9708 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9709 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9710 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9711 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9712 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9713 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9714 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9715 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9716 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9717 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9718 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9719 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9720 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9721 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9722 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9723 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9724
9725 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9726 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9727 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9728 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9729 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9730 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9731 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9732 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9733 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9734 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9735 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9736 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9737 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9738 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9739 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9740 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9741
9742 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9743 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9744 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9745 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9746 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9747 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9748 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9749 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9750 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9751 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9752 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9753 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9754 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9755 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9756 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9757 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9758
9759 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9760 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9761 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9762 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9763 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9764 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9765 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9766 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9767 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9768 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9769 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9770 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9771 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9772 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9773 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9774 /* 0xff */ IEMOP_X4(iemOp_ud0),
9775};
9776AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
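
/* For illustration only: a consumer of a 256*4 table like the one above
 * indexes it with the second opcode byte as the row and the last SIMD prefix
 * seen as the column, mirroring the Group 9 dispatch earlier in this file.
 * Sketch with made-up names, guarded out of the build: */
#if 0
#include <stdint.h>

typedef int (*PFNSKETCHOP)(void);
extern PFNSKETCHOP const g_apfnSketchMap[256 * 4]; /* stand-in for g_apfnTwoByteMap */

static PFNSKETCHOP sketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=66h, 2=F3h, 3=F2h */)
{
    return g_apfnSketchMap[(unsigned)bOpcode * 4 + idxPrefix];
}
#endif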
9777
9778/** @} */
9779