VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 78402

Last change on this file since 78402 was 76652, checked in by vboxsync, 6 years ago

VMM/IEM: Nested VMX: bugref:9180 VMREAD/VMLAUNCH fixes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 342.7 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 76652 2019-01-06 07:29:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2019 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
/** Opcode 0x0f 0x00 /0. Store Local Descriptor Table register (SLDT). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or virtual-8086 mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: defer to the C implementation, which honors the
           effective operand size for the register write. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
51
52
/** Opcode 0x0f 0x00 /1. Store Task Register (STR). */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or virtual-8086 mode. */


    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the C implementation honors the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
78
79
/** Opcode 0x0f 0x00 /2. Load Local Descriptor Table register (LLDT). */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or virtual-8086 mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the 16-bit selector and hand it to the C implementation. */
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: privilege is checked before the 16-bit selector read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
110
111
/** Opcode 0x0f 0x00 /3. Load Task Register (LTR). */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or virtual-8086 mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the 16-bit selector and hand it to the C implementation. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: privilege is checked before the 16-bit selector read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
142
143
144/** Opcode 0x0f 0x00 /3. */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
174
175
/** Opcode 0x0f 0x00 /4. Verify a Segment for Reading (VERR) - thin wrapper
 *  around the common VerX worker with fWrite=false. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
183
184
/** Opcode 0x0f 0x00 /5. Verify a Segment for Writing (VERW) - thin wrapper
 *  around the common VerX worker with fWrite=true. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
192
193
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
208
/** Opcode 0x0f 0x00. Fetches the ModR/M byte and dispatches on its reg field
 *  via the Group 6 jump table. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
215
216
/** Opcode 0x0f 0x01 /0. Store Global Descriptor Table register (SGDT),
 *  memory form only. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();          /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
233
234
/** Opcode 0x0f 0x01 /0, mod=3, rm=1. VMX VMCALL / generic hypercall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
247
248
/** Opcode 0x0f 0x01 /0, mod=3, rm=2. VMX VMLAUNCH; raises \#UD when nested
 *  VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);  /* Must be in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);         /* Common VMX instruction checks. */
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
266
267
/** Opcode 0x0f 0x01 /0, mod=3, rm=3. VMX VMRESUME; raises \#UD when nested
 *  VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);  /* Must be in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);         /* Common VMX instruction checks. */
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
285
286
/** Opcode 0x0f 0x01 /0, mod=3, rm=4. VMX VMXOFF; raises \#UD when nested
 *  VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);     /* Must be in VMX operation. */
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);            /* Common VMX instruction checks. */
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
304
305
/** Opcode 0x0f 0x01 /1. Store Interrupt Descriptor Table register (SIDT),
 *  memory form only. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();          /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
322
323
/** Opcode 0x0f 0x01 /1, mod=3, rm=0. MONITOR - the effective segment is
 *  passed along since MONITOR takes an address in DS:RAX (seg overridable). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
331
332
/** Opcode 0x0f 0x01 /1, mod=3, rm=1. MWAIT - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
340
341
/** Opcode 0x0f 0x01 /2. Load Global Descriptor Table register (LGDT),
 *  memory form only; the effective operand size selects the base width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();          /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
358
359
/** Opcode 0x0f 0x01 0xd0. XGETBV - \#UD unless the guest CPU exposes
 *  XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
374
375
/** Opcode 0x0f 0x01 0xd1. XSETBV - \#UD unless the guest CPU exposes
 *  XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
390
391
/** Opcode 0x0f 0x01 /3. Load Interrupt Descriptor Table register (LIDT),
 *  memory form only. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is always 64-bit, otherwise the
       effective operand size applies. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
410
411
/** Opcode 0x0f 0x01 0xd8. AMD SVM VMRUN; \#UD stub when nested SVM is not
 *  compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
423
/** Opcode 0x0f 0x01 0xd9. AMD VMMCALL / generic hypercall - always decoded,
 *  no SVM feature check (see note below). */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
436
/** Opcode 0x0f 0x01 0xda. AMD SVM VMLOAD; \#UD stub when nested SVM is not
 *  compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
448
449
/** Opcode 0x0f 0x01 0xdb. AMD SVM VMSAVE; \#UD stub when nested SVM is not
 *  compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
461
462
/** Opcode 0x0f 0x01 0xdc. AMD SVM STGI (set global interrupt flag); \#UD stub
 *  when nested SVM is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
474
475
/** Opcode 0x0f 0x01 0xdd. AMD SVM CLGI (clear global interrupt flag); \#UD
 *  stub when nested SVM is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
487
488
/** Opcode 0x0f 0x01 0xdf. AMD SVM INVLPGA; \#UD stub when nested SVM is not
 *  compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
500
501
/** Opcode 0x0f 0x01 0xde. AMD SVM SKINIT; \#UD stub when nested SVM is not
 *  compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
513
514
/** Opcode 0x0f 0x01 /4. Store Machine Status Word (SMSW). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the C implementation honors the effective operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
537
538
/** Opcode 0x0f 0x01 /6. Load Machine Status Word (LMSW). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: no guest address involved, so NIL_RTGCPTR is passed
           as the (unused) effective-address argument. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: fetch 16 bits and pass the effective address along too. */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
569
570
/** Opcode 0x0f 0x01 /7. Invalidate TLB entry for a page (INVLPG),
 *  memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();                /* INVLPG was introduced with the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
584
585
/** Opcode 0x0f 0x01 /7, mod=3, rm=0. SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();             /* #UD outside 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
594
595
/** Opcode 0x0f 0x01 /7, mod=3, rm=1. RDTSCP - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
603
604
/**
 * Group 7 jump table, memory variant (mod != 3), indexed by the ModR/M
 * reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
619
620
/** Opcode 0x0f 0x01. Group 7 dispatcher: memory forms go via the jump table,
 *  register forms (mod=3) are decoded by reg and rm fields below. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* VMX instructions (Intel). */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* MONITOR / MWAIT. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* XGETBV / XSETBV. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* SVM instructions (AMD); the inner switch covers all rm values. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            /* not reached - every rm value above returns */

        case 4: /* SMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS / RDTSCP. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
690
/** Opcodes 0x0f 0x02 and 0x0f 0x03 - common worker for LAR and LSL.
 *  (Header previously mislabeled this as 0x0f 0x00 /3.)  Reads a 16-bit
 *  selector (register or memory) and calls the C implementation; 32-bit
 *  and 64-bit destinations share the 64-bit worker. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or virtual-8086 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit and 64-bit share the 64-bit worker. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: always a 16-bit selector read. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
784
785
786
/** Opcode 0x0f 0x02. Load Access Rights (LAR) - wrapper around the common
 *  LAR/LSL worker with fIsLar=true. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
793
794
/** Opcode 0x0f 0x03. Load Segment Limit (LSL) - wrapper around the common
 *  LAR/LSL worker with fIsLar=false. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
801
802
/** Opcode 0x0f 0x05. SYSCALL - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
810
811
/** Opcode 0x0f 0x06. CLTS (clear task-switched flag in CR0) - deferred to
 *  the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
819
820
/** Opcode 0x0f 0x07. SYSRET - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
828
829
/** Opcode 0x0f 0x08. INVD (invalidate caches without writeback) - deferred
 *  to the C implementation. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();                /* INVD was introduced with the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
838
839
/** Opcode 0x0f 0x09. WBINVD (write back and invalidate caches) - deferred
 *  to the C implementation. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();                /* WBINVD was introduced with the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
848
849
/** Opcode 0x0f 0x0b. UD2 - the official undefined-opcode instruction;
 *  always raises \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
856
/** Opcode 0x0f 0x0d. AMD prefetch group (Group P); emulated as a NOP with a
 *  memory operand, like Intel does. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        /* Guest CPU does not advertise 3DNow! prefetch -> #UD. */
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid for this group. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address but do nothing with it. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
897
898
/** Opcode 0x0f 0x0e. AMD FEMMS (fast exit from MMX state). */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* #NM if CR0.TS/EM say so. */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();              /* Pending x87 exceptions first. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();                 /* Leave MMX mode (the point of FEMMS). */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
914
915
/** Opcode 0x0f 0x0f. 3DNow! escape: the actual opcode is the imm8 suffix
 *  byte, dispatched to iemOp_3DNowDispatcher when 3DNow! is compiled in. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        /* Guest CPU does not advertise 3DNow! -> #UD. */
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
934
935
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: straight 128-bit XMM register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();      /* #UD/#NM checks for SSE. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned 128-bit load into the XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();      /* #UD/#NM checks for SSE. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
986
987
/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: straight 128-bit XMM register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* #UD/#NM checks for SSE2. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned 128-bit load into the XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* #UD/#NM checks for SSE2. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1037
1038
1039/**
1040 * @opcode 0x10
1041 * @oppfx 0xf3
1042 * @opcpuid sse
1043 * @opgroup og_sse_simdfp_datamove
1044 * @opxcpttype 5
1045 * @optest op1=1 op2=2 -> op1=2
1046 * @optest op1=0 op2=-22 -> op1=-22
1047 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    /* MOVSS xmm, xmm/m32 (F3 0F 10): scalar single-precision move into Vss. */
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: only the low dword of the destination is
         * written; bits 127:32 are left unchanged.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: the loaded dword is zero extended to the full
         * 128-bit destination register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1091
1092
1093/**
1094 * @opcode 0x10
1095 * @oppfx 0xf2
1096 * @opcpuid sse2
1097 * @opgroup og_sse2_pcksclr_datamove
1098 * @opxcpttype 5
1099 * @optest op1=1 op2=2 -> op1=2
1100 * @optest op1=0 op2=-42 -> op1=-42
1101 */
1102FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1103{
1104 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1107 {
1108 /*
1109 * Register, register.
1110 */
1111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1112 IEM_MC_BEGIN(0, 1);
1113 IEM_MC_LOCAL(uint64_t, uSrc);
1114
1115 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1116 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1117 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1118 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1119
1120 IEM_MC_ADVANCE_RIP();
1121 IEM_MC_END();
1122 }
1123 else
1124 {
1125 /*
1126 * Memory, register.
1127 */
1128 IEM_MC_BEGIN(0, 2);
1129 IEM_MC_LOCAL(uint64_t, uSrc);
1130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1131
1132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1136
1137 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1138 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1139
1140 IEM_MC_ADVANCE_RIP();
1141 IEM_MC_END();
1142 }
1143 return VINF_SUCCESS;
1144}
1145
1146
1147/**
1148 * @opcode 0x11
1149 * @oppfx none
1150 * @opcpuid sse
1151 * @opgroup og_sse_simdfp_datamove
1152 * @opxcpttype 4UA
1153 * @optest op1=1 op2=2 -> op1=2
1154 * @optest op1=0 op2=-42 -> op1=-42
1155 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    /* MOVUPS xmm/m128, xmm (0F 11): unaligned packed-single store from Vps. */
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: full 128-bit copy, destination is the r/m field.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned 128-bit store of the source XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1196
1197
1198/**
1199 * @opcode 0x11
1200 * @oppfx 0x66
1201 * @opcpuid sse2
1202 * @opgroup og_sse2_pcksclr_datamove
1203 * @opxcpttype 4UA
1204 * @optest op1=1 op2=2 -> op1=2
1205 * @optest op1=0 op2=-42 -> op1=-42
1206 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    /* MOVUPD xmm/m128, xmm (66 0F 11): unaligned packed-double store from Vpd. */
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: full 128-bit copy, destination is the r/m field.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned 128-bit store of the source XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1247
1248
1249/**
1250 * @opcode 0x11
1251 * @oppfx 0xf3
1252 * @opcpuid sse
1253 * @opgroup og_sse_simdfp_datamove
1254 * @opxcpttype 5
1255 * @optest op1=1 op2=2 -> op1=2
1256 * @optest op1=0 op2=-22 -> op1=-22
1257 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    /* MOVSS xmm/m32, xmm (F3 0F 11): scalar single-precision store from Vss. */
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: only the low dword of the destination (r/m) is
         * written; its upper bits are left unchanged.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: store the low dword of the source XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1301
1302
1303/**
1304 * @opcode 0x11
1305 * @oppfx 0xf2
1306 * @opcpuid sse2
1307 * @opgroup og_sse2_pcksclr_datamove
1308 * @opxcpttype 5
1309 * @optest op1=1 op2=2 -> op1=2
1310 * @optest op1=0 op2=-42 -> op1=-42
1311 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    /* MOVSD xmm/m64, xmm (F2 0F 11): scalar double-precision store from Vsd. */
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: only the low qword of the destination (r/m) is
         * written; its upper bits are left unchanged.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: store the low qword of the source XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1355
1356
/* 0F 12: MOVHLPS (reg form) / MOVLPS (mem form) share this opcode byte. */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* movhlps: high qword of source -> low qword of destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* movlps: memory qword -> low qword of destination (high qword preserved). */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1419
1420
1421/**
1422 * @opcode 0x12
1423 * @opcodesub !11 mr/reg
1424 * @oppfx 0x66
1425 * @opcpuid sse2
1426 * @opgroup og_sse2_pcksclr_datamove
1427 * @opxcpttype 5
1428 * @optest op1=1 op2=2 -> op1=2
1429 * @optest op1=0 op2=-42 -> op1=-42
1430 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    /* MOVLPD xmm, m64 (66 0F 12): memory operand only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* Load qword into the low half of the destination; high half preserved. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1467
1468
1469/**
1470 * @opcode 0x12
1471 * @oppfx 0xf3
1472 * @opcpuid sse3
1473 * @opgroup og_sse3_pcksclr_datamove
1474 * @opxcpttype 4
1475 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1476 * op1=0x00000002000000020000000100000001
1477 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    /* MOVSLDUP xmm, xmm/m128 (F3 0F 12): duplicate even-indexed singles; worker does the shuffle. */
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The 128-bit fetch enforces SSE alignment
         * (exception type 4).
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1527
1528
1529/**
1530 * @opcode 0x12
1531 * @oppfx 0xf2
1532 * @opcpuid sse3
1533 * @opgroup og_sse3_pcksclr_datamove
1534 * @opxcpttype 5
1535 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1536 * op1=0x22222222111111112222222211111111
1537 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    /* MOVDDUP xmm, xmm/m64 (F2 0F 12): duplicate the low/source qword into both halves. */
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: only the low qword of the source is read.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: 64-bit fetch, so no 16-byte alignment requirement
         * (exception type 5).
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1586
1587
1588/**
1589 * @opcode 0x13
1590 * @opcodesub !11 mr/reg
1591 * @oppfx none
1592 * @opcpuid sse
1593 * @opgroup og_sse_simdfp_datamove
1594 * @opxcpttype 5
1595 * @optest op1=1 op2=2 -> op1=2
1596 * @optest op1=0 op2=-42 -> op1=-42
1597 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    /* MOVLPS m64, xmm (0F 13): memory operand only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the low qword of the source XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1634
1635
1636/**
1637 * @opcode 0x13
1638 * @opcodesub !11 mr/reg
1639 * @oppfx 0x66
1640 * @opcpuid sse2
1641 * @opgroup og_sse2_pcksclr_datamove
1642 * @opxcpttype 5
1643 * @optest op1=1 op2=2 -> op1=2
1644 * @optest op1=0 op2=-42 -> op1=-42
1645 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    /* MOVLPD m64, xmm (66 0F 13): memory operand only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        /* Store the low qword of the source XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1681
1682
1683/**
1684 * @opmnemonic udf30f13
1685 * @opcode 0x13
1686 * @oppfx 0xf3
1687 * @opunused intel-modrm
1688 * @opcpuid sse
1689 * @optest ->
1690 * @opdone
1691 */
1692
1693/**
1694 * @opmnemonic udf20f13
1695 * @opcode 0x13
1696 * @oppfx 0xf2
1697 * @opunused intel-modrm
1698 * @opcpuid sse
1699 * @optest ->
1700 * @opdone
1701 */
1702
1703/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1704FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1705/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1706FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1707
1708/**
1709 * @opdone
1710 * @opmnemonic udf30f14
1711 * @opcode 0x14
1712 * @oppfx 0xf3
1713 * @opunused intel-modrm
1714 * @opcpuid sse
1715 * @optest ->
1716 * @opdone
1717 */
1718
1719/**
1720 * @opmnemonic udf20f14
1721 * @opcode 0x14
1722 * @oppfx 0xf2
1723 * @opunused intel-modrm
1724 * @opcpuid sse
1725 * @optest ->
1726 * @opdone
1727 */
1728
1729/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1730FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1731/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1732FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1733/* Opcode 0xf3 0x0f 0x15 - invalid */
1734/* Opcode 0xf2 0x0f 0x15 - invalid */
1735
1736/**
1737 * @opdone
1738 * @opmnemonic udf30f15
1739 * @opcode 0x15
1740 * @oppfx 0xf3
1741 * @opunused intel-modrm
1742 * @opcpuid sse
1743 * @optest ->
1744 * @opdone
1745 */
1746
1747/**
1748 * @opmnemonic udf20f15
1749 * @opcode 0x15
1750 * @oppfx 0xf2
1751 * @opunused intel-modrm
1752 * @opcpuid sse
1753 * @optest ->
1754 * @opdone
1755 */
1756
/* 0F 16: MOVLHPS (reg form) / MOVHPS (mem form) share this opcode byte. */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* movlhps: low qword of source -> high qword of destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* movhps: memory qword -> high qword of destination (low qword preserved). */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1819
1820
1821/**
1822 * @opcode 0x16
1823 * @opcodesub !11 mr/reg
1824 * @oppfx 0x66
1825 * @opcpuid sse2
1826 * @opgroup og_sse2_pcksclr_datamove
1827 * @opxcpttype 5
1828 * @optest op1=1 op2=2 -> op1=2
1829 * @optest op1=0 op2=-42 -> op1=-42
1830 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    /* MOVHPD xmm, m64 (66 0F 16): memory operand only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        /* Load qword into the high half of the destination; low half preserved. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1866
1867
1868/**
1869 * @opcode 0x16
1870 * @oppfx 0xf3
1871 * @opcpuid sse3
1872 * @opgroup og_sse3_pcksclr_datamove
1873 * @opxcpttype 4
1874 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1875 * op1=0x00000002000000020000000100000001
1876 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    /* MOVSHDUP xmm, xmm/m128 (F3 0F 16): duplicate odd-indexed singles; worker does the shuffle. */
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The 128-bit fetch enforces SSE alignment
         * (exception type 4).
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1926
1927/**
1928 * @opdone
1929 * @opmnemonic udf30f16
1930 * @opcode 0x16
1931 * @oppfx 0xf2
1932 * @opunused intel-modrm
1933 * @opcpuid sse
1934 * @optest ->
1935 * @opdone
1936 */
1937
1938
1939/**
1940 * @opcode 0x17
1941 * @opcodesub !11 mr/reg
1942 * @oppfx none
1943 * @opcpuid sse
1944 * @opgroup og_sse_simdfp_datamove
1945 * @opxcpttype 5
1946 * @optest op1=1 op2=2 -> op1=2
1947 * @optest op1=0 op2=-42 -> op1=-42
1948 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* MOVHPS m64, xmm (0F 17): memory operand only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the high qword of the source XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the XMM register */

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1985
1986
1987/**
1988 * @opcode 0x17
1989 * @opcodesub !11 mr/reg
1990 * @oppfx 0x66
1991 * @opcpuid sse2
1992 * @opgroup og_sse2_pcksclr_datamove
1993 * @opxcpttype 5
1994 * @optest op1=1 op2=2 -> op1=2
1995 * @optest op1=0 op2=-42 -> op1=-42
1996 */
1997FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1998{
1999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2000 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2001 {
2002 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2003
2004 IEM_MC_BEGIN(0, 2);
2005 IEM_MC_LOCAL(uint64_t, uSrc);
2006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2007
2008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2011 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2012
2013 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2014 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2015
2016 IEM_MC_ADVANCE_RIP();
2017 IEM_MC_END();
2018 return VINF_SUCCESS;
2019 }
2020
2021 /**
2022 * @opdone
2023 * @opmnemonic ud660f17m3
2024 * @opcode 0x17
2025 * @opcodesub 11 mr/reg
2026 * @oppfx 0x66
2027 * @opunused immediate
2028 * @opcpuid sse
2029 * @optest ->
2030 */
2031 return IEMOP_RAISE_INVALID_OPCODE();
2032}
2033
2034
2035/**
2036 * @opdone
2037 * @opmnemonic udf30f17
2038 * @opcode 0x17
2039 * @oppfx 0xf3
2040 * @opunused intel-modrm
2041 * @opcpuid sse
2042 * @optest ->
2043 * @opdone
2044 */
2045
2046/**
2047 * @opmnemonic udf20f17
2048 * @opcode 0x17
2049 * @oppfx 0xf2
2050 * @opunused intel-modrm
2051 * @opcpuid sse
2052 * @optest ->
2053 * @opdone
2054 */
2055
2056
2057/** Opcode 0x0f 0x18. */
/** Opcode 0x0f 0x18 (group 16): PREFETCHh hints; emulated as a NOP. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The /r field selects the hint; /4../7 alias /0 per AMD. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the memory operand for side effects (prefixes consumed), then do nothing. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms of prefetch are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2089
2090
2091/** Opcode 0x0f 0x19..0x1f. */
/** Opcode 0x0f 0x19..0x1f: multi-byte NOP; decodes Ev but has no effect. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: nothing to decode beyond ModR/M. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: decode the effective address (consumes SIB/disp bytes)
           but perform no access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2116
2117
2118/** Opcode 0x0f 0x20. */
/** Opcode 0x0f 0x20: MOV r, CRx. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
2149
2150
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15; rejected here with #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
2164
2165
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
2197
2198
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15; rejected here with #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
2212
2213
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test-register moves: unconditionally #UD on the emulated CPUs. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2222
2223
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test-register moves: unconditionally #UD on the emulated CPUs. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2232
2233
/**
 * @opcode      0x28
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The load must be 16-byte aligned (#GP otherwise).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2283
/**
 * @opcode      0x28
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The load must be 16-byte aligned (#GP otherwise).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2333
2334/* Opcode 0xf3 0x0f 0x28 - invalid */
2335/* Opcode 0xf2 0x0f 0x28 - invalid */
2336
/**
 * @opcode      0x29
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.  MR form: rm is the destination, reg the source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  (The locals are named *Src although the memory
         * operand is the destination here - historical naming.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2386
/**
 * @opcode      0x29
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.  MR form: rm is the destination, reg the source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  (The locals are named *Src although the memory
         * operand is the destination here - historical naming.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2436
2437/* Opcode 0xf3 0x0f 0x29 - invalid */
2438/* Opcode 0xf2 0x0f 0x29 - invalid */
2439
2440
/* 0x0f 0x2a conversions: not implemented yet, decoder stubs only. */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2449
2450
2451/**
2452 * @opcode 0x2b
2453 * @opcodesub !11 mr/reg
2454 * @oppfx none
2455 * @opcpuid sse
2456 * @opgroup og_sse1_cachect
2457 * @opxcpttype 1
2458 * @optest op1=1 op2=2 -> op1=2
2459 * @optest op1=0 op2=-42 -> op1=-42
2460 */
2461FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2462{
2463 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2466 {
2467 /*
2468 * memory, register.
2469 */
2470 IEM_MC_BEGIN(0, 2);
2471 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2478
2479 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2480 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2481
2482 IEM_MC_ADVANCE_RIP();
2483 IEM_MC_END();
2484 }
2485 /* The register, register encoding is invalid. */
2486 else
2487 return IEMOP_RAISE_INVALID_OPCODE();
2488 return VINF_SUCCESS;
2489}
2490
2491/**
2492 * @opcode 0x2b
2493 * @opcodesub !11 mr/reg
2494 * @oppfx 0x66
2495 * @opcpuid sse2
2496 * @opgroup og_sse2_cachect
2497 * @opxcpttype 1
2498 * @optest op1=1 op2=2 -> op1=2
2499 * @optest op1=0 op2=-42 -> op1=-42
2500 */
2501FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2502{
2503 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2505 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2506 {
2507 /*
2508 * memory, register.
2509 */
2510 IEM_MC_BEGIN(0, 2);
2511 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2513
2514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2518
2519 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2520 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2521
2522 IEM_MC_ADVANCE_RIP();
2523 IEM_MC_END();
2524 }
2525 /* The register, register encoding is invalid. */
2526 else
2527 return IEMOP_RAISE_INVALID_OPCODE();
2528 return VINF_SUCCESS;
2529}
2530/* Opcode 0xf3 0x0f 0x2b - invalid */
2531/* Opcode 0xf2 0x0f 0x2b - invalid */
2532
2533
/* 0x0f 0x2c..0x2f conversions and compares: decoder stubs, not implemented yet. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2565
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* Fully handled in the C implementation (privilege/MSR checks there). */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2573
2574
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* Fully handled in the C implementation. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2582
2583
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Fully handled in the C implementation (privilege/MSR checks there). */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2591
2592
/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    /* Fully handled in the C implementation. */
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
2600
2601
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
2608
2609
/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* Dispatch on the third opcode byte; the table has four entries per
       opcode, selected by the operand-size/repeat prefix (idxPrefix). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2621
2622
/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* Dispatch on the third opcode byte; the table has four entries per
       opcode, selected by the operand-size/repeat prefix (idxPrefix). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2634
2635
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes on semantics visible in the expansion below:
 *  - The memory operand is always fetched, whether or not the condition
 *    holds (matches real hardware; the access can fault either way).
 *  - For 32-bit operands the ELSE branch clears the high half of the
 *    destination GPR, i.e. the usual 64-bit-mode zero extension happens
 *    even when the condition is false.
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2736
2737
2738
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move when OF=1 */
}
2745
2746
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move when OF=0 */
}
2753
2754
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move when CF=1 */
}
2761
2762
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move when CF=0 */
}
2769
2770
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move when ZF=1 */
}
2777
2778
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move when ZF=0 */
}
2785
2786
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=1 or ZF=1 */
}
2793
2794
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=0 and ZF=0 */
}
2801
2802
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move when SF=1 */
}
2809
2810
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move when SF=0 */
}
2817
2818
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move when PF=1 */
}
2825
2826
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move when PF=0 */
}
2833
2834
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move when SF != OF */
}
2841
2842
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move when SF == OF */
}
2849
2850
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=1 or SF != OF */
}
2857
2858
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=0 and SF == OF */
}
2865
2866#undef CMOV_X
2867
/* 0x0f 0x50..0x5f SSE/SSE2 arithmetic and conversion instructions:
   decoder stubs only, not implemented yet. */
/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2996
2997/**
2998 * Common worker for MMX instructions on the forms:
2999 * pxxxx mm1, mm2/mem32
3000 *
3001 * The 2nd operand is the first half of a register, which in the memory case
3002 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3003 * memory accessed for MMX.
3004 *
3005 * Exceptions type 4.
3006 */
3007FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3008{
3009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3011 {
3012 /*
3013 * Register, register.
3014 */
3015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3016 IEM_MC_BEGIN(2, 0);
3017 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3018 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3019 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3020 IEM_MC_PREPARE_SSE_USAGE();
3021 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3022 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3023 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3024 IEM_MC_ADVANCE_RIP();
3025 IEM_MC_END();
3026 }
3027 else
3028 {
3029 /*
3030 * Register, memory.
3031 */
3032 IEM_MC_BEGIN(2, 2);
3033 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3034 IEM_MC_LOCAL(uint64_t, uSrc);
3035 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3037
3038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3040 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3041 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3042
3043 IEM_MC_PREPARE_SSE_USAGE();
3044 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3045 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3046
3047 IEM_MC_ADVANCE_RIP();
3048 IEM_MC_END();
3049 }
3050 return VINF_SUCCESS;
3051}
3052
3053
3054/**
3055 * Common worker for SSE2 instructions on the forms:
3056 * pxxxx xmm1, xmm2/mem128
3057 *
3058 * The 2nd operand is the first half of a register, which in the memory case
3059 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3060 * memory accessed for MMX.
3061 *
3062 * Exceptions type 4.
3063 */
3064FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3065{
3066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3067 if (!pImpl->pfnU64)
3068 return IEMOP_RAISE_INVALID_OPCODE();
3069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3070 {
3071 /*
3072 * Register, register.
3073 */
3074 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3075 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077 IEM_MC_BEGIN(2, 0);
3078 IEM_MC_ARG(uint64_t *, pDst, 0);
3079 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3080 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3081 IEM_MC_PREPARE_FPU_USAGE();
3082 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3083 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3084 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3085 IEM_MC_ADVANCE_RIP();
3086 IEM_MC_END();
3087 }
3088 else
3089 {
3090 /*
3091 * Register, memory.
3092 */
3093 IEM_MC_BEGIN(2, 2);
3094 IEM_MC_ARG(uint64_t *, pDst, 0);
3095 IEM_MC_LOCAL(uint32_t, uSrc);
3096 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3098
3099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3102 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3103
3104 IEM_MC_PREPARE_FPU_USAGE();
3105 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3106 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3107
3108 IEM_MC_ADVANCE_RIP();
3109 IEM_MC_END();
3110 }
3111 return VINF_SUCCESS;
3112}
3113
3114
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
3121
3122/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
3123FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3124{
3125 IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "vpunpcklbw Vx, Wx");
3126 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3127}
3128
3129/* Opcode 0xf3 0x0f 0x60 - invalid */
3130
3131
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /* MMX form: delegate to the common low-half unpack worker. */
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
3138
3139/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3140FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3141{
3142 IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3143 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3144}
3145
3146/* Opcode 0xf3 0x0f 0x61 - invalid */
3147
3148
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    /* MMX form: delegate to the common low-half unpack worker. */
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
3155
/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    /* SSE2 form: delegate to the common low-half unpack worker. */
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
3162
3163/* Opcode 0xf3 0x0f 0x62 - invalid */
3164
3165
3166
/* Decoder stubs for 0x0f 0x63..0x67: the opcodes are recognized but their
   emulation is not implemented yet (FNIEMOP_STUB presumably expands to a
   stub body - TODO confirm against the macro definition). */

/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
3196
3197
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE()); /* all current users supply a 64-bit worker */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address bytes are consumed before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3256
3257
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the worker may read the full 128 bits
 * or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,          pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,         pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3313
3314
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    /* MMX form: delegate to the common high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
3321
3322/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3323FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3324{
3325 IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "vpunpckhbw Vx, Wx");
3326 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3327}
3328/* Opcode 0xf3 0x0f 0x68 - invalid */
3329
3330
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    /* MMX form: delegate to the common high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3337
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx (the Hx operand is VEX-only). */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    /* SSE2 form: delegate to the common high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);

}
3345/* Opcode 0xf3 0x0f 0x69 - invalid */
3346
3347
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    /* MMX form: delegate to the common high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
3354
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    /* SSE2 form: delegate to the common high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
3361/* Opcode 0xf3 0x0f 0x6a - invalid */
3362
3363
/* Decoder stubs: packssdw is recognized but not yet implemented. */
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3368/* Opcode 0xf3 0x0f 0x6b - invalid */
3369
3370
3371/* Opcode 0x0f 0x6c - invalid */
3372
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx (SSE2-only; no MMX form). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* Delegates to the common SSE low-half unpack worker. */
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
3379
3380/* Opcode 0xf3 0x0f 0x6c - invalid */
3381/* Opcode 0xf2 0x0f 0x6c - invalid */
3382
3383
3384/* Opcode 0x0f 0x6d - invalid */
3385
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx (SSE2-only; no MMX form). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    /* Delegates to the common SSE high-half unpack worker. */
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
3392
3393/* Opcode 0xf3 0x0f 0x6d - invalid */
3394
3395
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq: load an MMX register from a
 *  general register or memory; REX.W selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            /* Writing an MMX register switches the x87 unit to MMX mode (ftw=0xff per @optest). */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg: 32-bit source zero-extended into the 64-bit MMX register. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: 32-bit load, zero-extended on store. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
3503
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq: load an XMM register from
 *  a general register or memory, zero-extending to 128 bits; REX.W selects
 *  the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
3607
3608/* Opcode 0xf3 0x0f 0x6e - invalid */
3609
3610
3611/**
3612 * @opcode 0x6f
3613 * @oppfx none
3614 * @opcpuid mmx
3615 * @opgroup og_mmx_datamove
3616 * @opxcpttype 5
3617 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3618 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3619 */
3620FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3621{
3622 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3625 {
3626 /*
3627 * Register, register.
3628 */
3629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3630 IEM_MC_BEGIN(0, 1);
3631 IEM_MC_LOCAL(uint64_t, u64Tmp);
3632
3633 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3634 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3635
3636 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3637 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3638 IEM_MC_FPU_TO_MMX_MODE();
3639
3640 IEM_MC_ADVANCE_RIP();
3641 IEM_MC_END();
3642 }
3643 else
3644 {
3645 /*
3646 * Register, memory.
3647 */
3648 IEM_MC_BEGIN(0, 2);
3649 IEM_MC_LOCAL(uint64_t, u64Tmp);
3650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3651
3652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3654 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3655 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3656
3657 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3658 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3659 IEM_MC_FPU_TO_MMX_MODE();
3660
3661 IEM_MC_ADVANCE_RIP();
3662 IEM_MC_END();
3663 }
3664 return VINF_SUCCESS;
3665}
3666
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    /* movdqa: aligned 128-bit load/copy into an XMM register. */
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Alignment-checked fetch - this is the 'a' in movdqa. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3718
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    /* movdqu: unaligned 128-bit load/copy into an XMM register. */
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Plain (non-aligned) fetch - this is the 'u' in movdqu. */
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3767
3768
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 *  Shuffle MMX words by the immediate; requires SSE or the AMD MMX
 *  extensions (see the exception-check macro below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The immediate byte follows the ModR/M/displacement bytes, so it is
           read after the effective address has been decoded. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3821
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib
 *  Shuffle XMM dwords by the immediate (SSE2). */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate comes after the ModR/M/displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3874
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib
 *  Shuffle the high quadword's words by the immediate (SSE2). */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate comes after the ModR/M/displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3927
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib
 *  Shuffle the low quadword's words by the immediate (SSE2). */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst, 0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Immediate comes after the ModR/M/displacement bytes. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3980
3981
/* Group 12 (0x0f 0x71) immediate-shift stubs: recognized but the emulation
   is not implemented yet. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3999
4000
/**
 * Group 12 jump table for register variant.
 *
 * Four entries per /r value, indexed by pVCpu->iem.s.idxPrefix (see
 * iemOp_Grp12): per the stub comments column 0 is the unprefixed (MMX) form
 * and column 1 the 0x66 (SSE) form; the last two columns are presumably the
 * 0xf3/0xf2 prefixes - all invalid for this group.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4016
4017
4018/** Opcode 0x0f 0x71. */
4019FNIEMOP_DEF(iemOp_Grp12)
4020{
4021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4023 /* register, register */
4024 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4025 + pVCpu->iem.s.idxPrefix], bRm);
4026 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4027}
4028
4029
/* Group 13 (0x0f 0x72) immediate-shift stubs: recognized but the emulation
   is not implemented yet. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4047
4048
/**
 * Group 13 jump table for register variant.
 *
 * Four entries per /r value, indexed by pVCpu->iem.s.idxPrefix (see
 * iemOp_Grp13): column 0 is the unprefixed (MMX) form, column 1 the 0x66
 * (SSE) form; the last two columns are presumably 0xf3/0xf2 - all invalid.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4064
4065/** Opcode 0x0f 0x72. */
4066FNIEMOP_DEF(iemOp_Grp13)
4067{
4068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4070 /* register, register */
4071 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4072 + pVCpu->iem.s.idxPrefix], bRm);
4073 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4074}
4075
4076
/*
 * Group 14 (opcode 0x0f 0x73), register operand forms: packed qword /
 * double-qword shift-by-immediate instructions.  All decode stubs for now
 * (not implemented yet); dispatched via g_apfnGroup14RegReg below.
 */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4094
4095/**
4096 * Group 14 jump table for register variant.
4097 */
4098IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4099{
4100 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4101 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4102 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4103 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4104 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4105 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4106 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4107 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4108};
4109AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4110
4111
4112/** Opcode 0x0f 0x73. */
4113FNIEMOP_DEF(iemOp_Grp14)
4114{
4115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4116 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4117 /* register, register */
4118 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4119 + pVCpu->iem.s.idxPrefix], bRm);
4120 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4121}
4122
4123
4124/**
4125 * Common worker for MMX instructions on the form:
4126 * pxxx mm1, mm2/mem64
4127 */
4128FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4129{
4130 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4132 {
4133 /*
4134 * Register, register.
4135 */
4136 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4137 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4139 IEM_MC_BEGIN(2, 0);
4140 IEM_MC_ARG(uint64_t *, pDst, 0);
4141 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4142 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4143 IEM_MC_PREPARE_FPU_USAGE();
4144 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4145 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4146 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4147 IEM_MC_ADVANCE_RIP();
4148 IEM_MC_END();
4149 }
4150 else
4151 {
4152 /*
4153 * Register, memory.
4154 */
4155 IEM_MC_BEGIN(2, 2);
4156 IEM_MC_ARG(uint64_t *, pDst, 0);
4157 IEM_MC_LOCAL(uint64_t, uSrc);
4158 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4160
4161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4163 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4164 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4165
4166 IEM_MC_PREPARE_FPU_USAGE();
4167 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4168 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4169
4170 IEM_MC_ADVANCE_RIP();
4171 IEM_MC_END();
4172 }
4173 return VINF_SUCCESS;
4174}
4175
4176
4177/**
4178 * Common worker for SSE2 instructions on the forms:
4179 * pxxx xmm1, xmm2/mem128
4180 *
4181 * Proper alignment of the 128-bit operand is enforced.
4182 * Exceptions type 4. SSE2 cpuid checks.
4183 */
4184FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4185{
4186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4188 {
4189 /*
4190 * Register, register.
4191 */
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4193 IEM_MC_BEGIN(2, 0);
4194 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4195 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4196 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4197 IEM_MC_PREPARE_SSE_USAGE();
4198 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4199 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4201 IEM_MC_ADVANCE_RIP();
4202 IEM_MC_END();
4203 }
4204 else
4205 {
4206 /*
4207 * Register, memory.
4208 */
4209 IEM_MC_BEGIN(2, 2);
4210 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4211 IEM_MC_LOCAL(RTUINT128U, uSrc);
4212 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4214
4215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4217 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4218 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4219
4220 IEM_MC_PREPARE_SSE_USAGE();
4221 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4222 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4223
4224 IEM_MC_ADVANCE_RIP();
4225 IEM_MC_END();
4226 }
4227 return VINF_SUCCESS;
4228}
4229
4230
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq (MMX byte compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
4237
4238/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4239FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4240{
4241 IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
4242 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4243}
4244
4245/* Opcode 0xf3 0x0f 0x74 - invalid */
4246/* Opcode 0xf2 0x0f 0x74 - invalid */
4247
4248
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq (MMX word compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
4255
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx (SSE2 word compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
4262
4263/* Opcode 0xf3 0x0f 0x75 - invalid */
4264/* Opcode 0xf2 0x0f 0x75 - invalid */
4265
4266
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq (MMX dword compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
4273
4274/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4275FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4276{
4277 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
4278 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4279}
4280
4281/* Opcode 0xf3 0x0f 0x76 - invalid */
4282/* Opcode 0xf2 0x0f 0x76 - invalid */
4283
4284
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* device-not-available check first */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();              /* then pending FPU exception check */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();                 /* the actual work: leave MMX mode */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4300
4301/* Opcode 0x66 0x0f 0x77 - invalid */
4302/* Opcode 0xf3 0x0f 0x77 - invalid */
4303/* Opcode 0xf2 0x0f 0x77 - invalid */
4304
4305/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4306#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    /*
     * VMREAD: reads the VMCS field selected by the encoding in Gy into the
     * register or memory operand Ey.  Must be in VMX operation; 0x66/0xf3/0xf2
     * prefixes are rejected by the decode helper below.
     */
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size depends solely on CPU mode: 64-bit in long mode, else 32-bit. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* Field encoding comes from ModRM.reg, destination from ModRM.rm. */
            IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
            IEM_MC_ARG(uint64_t, u64Enc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem_reg64, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
            IEM_MC_ARG(uint32_t, u32Enc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem_reg32, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4378#else
4379FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4380#endif
4381
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_AmdGrp17);
4384/* Opcode 0xf3 0x0f 0x78 - invalid */
4385/* Opcode 0xf2 0x0f 0x78 - invalid */
4386
4387/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4388#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    /*
     * VMWRITE: writes the register or memory operand Ey to the VMCS field
     * selected by the encoding in Gy.  Must be in VMX operation; 0x66/0xf3/0xf2
     * prefixes are rejected by the decode helper below.
     */
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size depends solely on CPU mode: 64-bit in long mode, else 32-bit. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* Value comes from ModRM.rm, field encoding from ModRM.reg. */
            IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            /* Both sizes funnel into the same C implementation (iemCImpl_vmwrite_reg). */
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
            IEM_MC_ARG(uint64_t, u64Enc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
            IEM_MC_ARG(uint32_t, u32Enc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4460#else
4461FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4462#endif
4463/* Opcode 0x66 0x0f 0x79 - invalid */
4464/* Opcode 0xf3 0x0f 0x79 - invalid */
4465/* Opcode 0xf2 0x0f 0x79 - invalid */
4466
4467/* Opcode 0x0f 0x7a - invalid */
4468/* Opcode 0x66 0x0f 0x7a - invalid */
4469/* Opcode 0xf3 0x0f 0x7a - invalid */
4470/* Opcode 0xf2 0x0f 0x7a - invalid */
4471
4472/* Opcode 0x0f 0x7b - invalid */
4473/* Opcode 0x66 0x0f 0x7b - invalid */
4474/* Opcode 0xf3 0x0f 0x7b - invalid */
4475/* Opcode 0xf2 0x0f 0x7b - invalid */
4476
/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_haddps_Vps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4490
4491
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 * Stores a 32-bit (or, with REX.W, 64-bit) value from an MMX register to a
 * general register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            /* MMX source indexing takes no REX bits; the greg destination does. */
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Pd_Ey
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        /* NOTE(review): the @opfunction tag above names the load form
           (iemOp_movd_q_Pd_Ey) while this function is the store form
           (iemOp_movd_q_Ey_Pd) - looks like a copy/paste slip; verify against
           the testcase generator before changing. */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
4601
4602
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy
 * Stores a 32-bit (or, with REX.W, 64-bit) value from an XMM register to a
 * general register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        /* NOTE(review): the @opfunction tag above names the load form
           (iemOp_movd_q_Vy_Ey) while this function is the store form
           (iemOp_movd_q_Ey_Vy) - looks like a copy/paste slip; verify against
           the testcase generator before changing. */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
4707
4708/**
4709 * @opcode 0x7e
4710 * @oppfx 0xf3
4711 * @opcpuid sse2
4712 * @opgroup og_sse2_pcksclr_datamove
4713 * @opxcpttype none
4714 * @optest op1=1 op2=2 -> op1=2
4715 * @optest op1=0 op2=-42 -> op1=-42
4716 */
4717FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4718{
4719 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4722 {
4723 /*
4724 * Register, register.
4725 */
4726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4727 IEM_MC_BEGIN(0, 2);
4728 IEM_MC_LOCAL(uint64_t, uSrc);
4729
4730 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4731 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4732
4733 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4734 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4735
4736 IEM_MC_ADVANCE_RIP();
4737 IEM_MC_END();
4738 }
4739 else
4740 {
4741 /*
4742 * Memory, register.
4743 */
4744 IEM_MC_BEGIN(0, 2);
4745 IEM_MC_LOCAL(uint64_t, uSrc);
4746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4747
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4751 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4752
4753 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4754 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4755
4756 IEM_MC_ADVANCE_RIP();
4757 IEM_MC_END();
4758 }
4759 return VINF_SUCCESS;
4760}
4761
4762/* Opcode 0xf2 0x0f 0x7e - invalid */
4763
4764
/** Opcode 0x0f 0x7f - movq Qq, Pq
 * Stores a 64-bit MMX register to an MMX register or memory. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_CHANGE: the destination MMX register is modified here. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
        /* NOTE(review): unlike the 0x7e MMX paths there is no
           IEM_MC_FPU_TO_MMX_MODE here - verify whether that is intentional. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_READ: only memory is written, the FPU/MMX state is just read. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4809
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 * Stores a 128-bit XMM register to an XMM register or (aligned) memory. */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is ModRM.rm, source is ModRM.reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* Aligned store - this is what distinguishes movdqa from movdqu below. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4851
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 * Stores a 128-bit XMM register to an XMM register or (unaligned) memory. */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    /* Note: mnemonic is recorded after the ModRM fetch here, the reverse of
       movdqa above; both statements are independent so this is only a style
       inconsistency. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is ModRM.rm, source is ModRM.reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* Unaligned store - no _ALIGN_SSE variant, unlike movdqa. */
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4893
4894/* Opcode 0xf2 0x0f 0x7f - invalid */
4895
4896
4897
/** Opcode 0x0f 0x80 - jo Jv: near jump if OF is set. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4932
4933
/** Opcode 0x0f 0x81 - jno Jv: near jump if OF is clear
 * (tests OF set, with the branch bodies swapped). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4968
4969
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: near jump if CF is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5004
5005
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: near jump if CF is clear
 * (tests CF set, with the branch bodies swapped). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5040
5041
/** Opcode 0x0f 0x84 - je/jz Jv: near jump if ZF is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5076
5077
/** Opcode 0x0f 0x85 - jne/jnz Jv: near jump if ZF is clear
 * (tests ZF set, with the branch bodies swapped). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5112
5113
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when CF or ZF is set
       (unsigned below-or-equal). */
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* CF or ZF set: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5148
5149
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when both CF and ZF
       are clear (unsigned above).  Inverted branch sense: the any-bit-set path
       merely advances RIP. */
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* CF or ZF set: condition false */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* both clear: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5184
5185
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when SF is set. */
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF set: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5220
5221
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when SF is clear.
       Inverted branch sense: the flag-set path merely advances RIP. */
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();       /* SF set: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF clear: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5256
5257
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when PF is set. */
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* PF set: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5292
5293
/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when PF is clear.
       Inverted branch sense: the flag-set path merely advances RIP. */
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();       /* PF set: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* PF clear: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5328
5329
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when SF != OF
       (signed less-than). */
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF != OF: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5364
5365
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when SF == OF
       (signed greater-or-equal).  Inverted branch sense: the bits-differ path
       merely advances RIP. */
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* SF != OF: condition false */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF == OF: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5400
5401
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when ZF is set or
       SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* ZF set or SF != OF: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5436
5437
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* Conditional near jump, 16/32-bit displacement: taken when ZF is clear
       and SF == OF (signed greater-than).  Inverted branch sense: the
       condition-met path of the MC test merely advances RIP. */
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                /* Jcc rel16/rel32 encodings are 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* ZF set or SF != OF: condition false */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5472
5473
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* Store 1 in the byte-sized r/m operand when OF is set, 0 otherwise. */
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5514
5515
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* Store 1 in the byte-sized r/m operand when OF is clear, 0 otherwise
       (store values are swapped relative to seto). */
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5556
5557
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* Store 1 in the byte-sized r/m operand when CF is set, 0 otherwise. */
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5598
5599
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* Store 1 in the byte-sized r/m operand when CF is clear, 0 otherwise. */
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5640
5641
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* Store 1 in the byte-sized r/m operand when ZF is set, 0 otherwise. */
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5682
5683
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* Store 1 in the byte-sized r/m operand when ZF is clear, 0 otherwise. */
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5724
5725
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* Store 1 in the byte-sized r/m operand when CF or ZF is set (unsigned
       below-or-equal), 0 otherwise. */
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5766
5767
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* Store 1 in the byte-sized r/m operand when both CF and ZF are clear
       (unsigned above), 0 otherwise. */
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5808
5809
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* Store 1 in the byte-sized r/m operand when SF is set, 0 otherwise. */
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5850
5851
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* Store 1 in the byte-sized r/m operand when SF is clear, 0 otherwise. */
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5892
5893
/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    /* Store 1 in the byte-sized r/m operand when PF is set, 0 otherwise. */
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5934
5935
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* Store 1 in the byte-sized r/m operand when PF is clear, 0 otherwise. */
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5976
5977
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* Store 1 in the byte-sized r/m operand when SF != OF (signed less-than),
       0 otherwise. */
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6018
6019
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* Store 1 in the byte-sized r/m operand when SF == OF (signed
       greater-or-equal), 0 otherwise. */
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6060
6061
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* Store 1 in the byte-sized r/m operand when ZF is set or SF != OF
       (signed less-or-equal), 0 otherwise. */
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6102
6103
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* Store 1 in the byte-sized r/m operand when ZF is clear and SF == OF
       (signed greater-than), 0 otherwise. */
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* decode addressing bytes first */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6144
6145
6146/**
6147 * Common 'push segment-register' helper.
6148 *
6149 * Pushes the 16-bit selector value of @a iReg, zero extended to the effective
6149 * operand size (16/32/64-bit stack slot).  Note that the 32-bit case uses the
6149 * special IEM_MC_PUSH_U32_SREG micro-op rather than the plain U32 push
6149 * (sreg pushes have special partial-write semantics on real CPUs).
6149 */
6150FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6151{
 /* NOTE(review): callers (push fs/gs) also invoke this decoding-done helper
    before calling us, so it runs twice on those paths - presumably harmless,
    but confirm. */
6151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* In 64-bit mode only FS/GS pushes exist; ES/CS/SS/DS opcodes are invalid there. */
6152 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
6153 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6154
6155 switch (pVCpu->iem.s.enmEffOpSize)
6156 {
6157 case IEMMODE_16BIT:
6158 IEM_MC_BEGIN(0, 1);
6159 IEM_MC_LOCAL(uint16_t, u16Value);
6160 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6161 IEM_MC_PUSH_U16(u16Value);
6162 IEM_MC_ADVANCE_RIP();
6163 IEM_MC_END();
6164 break;
6165
6166 case IEMMODE_32BIT:
6167 IEM_MC_BEGIN(0, 1);
6168 IEM_MC_LOCAL(uint32_t, u32Value);
6169 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6170 IEM_MC_PUSH_U32_SREG(u32Value);
6171 IEM_MC_ADVANCE_RIP();
6172 IEM_MC_END();
6173 break;
6174
6175 case IEMMODE_64BIT:
6176 IEM_MC_BEGIN(0, 1);
6177 IEM_MC_LOCAL(uint64_t, u64Value);
6178 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6179 IEM_MC_PUSH_U64(u64Value);
6180 IEM_MC_ADVANCE_RIP();
6181 IEM_MC_END();
6182 break;
6183 }
6184
6185 return VINF_SUCCESS;
6186}
6187
6188
6189/** Opcode 0x0f 0xa0 - push fs (386+); defers to the common sreg-push helper. */
6190FNIEMOP_DEF(iemOp_push_fs)
6191{
6192 IEMOP_MNEMONIC(push_fs, "push fs");
6193 IEMOP_HLP_MIN_386();
6194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6195 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6196}
6197
6198
6199/** Opcode 0x0f 0xa1 - pop fs (386+); deferred to a C implementation since it can fault/reload the sreg. */
6200FNIEMOP_DEF(iemOp_pop_fs)
6201{
6202 IEMOP_MNEMONIC(pop_fs, "pop fs");
6203 IEMOP_HLP_MIN_386();
6204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6205 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6206}
6207
6208
6209/** Opcode 0x0f 0xa2 - cpuid; deferred entirely to the C implementation. */
6210FNIEMOP_DEF(iemOp_cpuid)
6211{
6212 IEMOP_MNEMONIC(cpuid, "cpuid");
6213 IEMOP_HLP_MIN_486(); /* not all 486es. */
6214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6215 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6216}
6217
6218
6219/**
6220 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6221 * iemOp_bts_Ev_Gv.
6221 *
6221 * Register form: the bit offset is masked to the operand width (0xf/0x1f/0x3f).
6221 * Memory form: the full (signed) bit offset first adjusts the effective
6221 * address by whole operand-sized units, then the remaining low bits select
6221 * the bit within that unit.  BT is distinguished by pfnLockedU16 == NULL
6221 * (read-only access, LOCK prefix not permitted).
6222 */
6223FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6224{
6225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6226 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6227
6228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6229 {
6230 /* register destination. */
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 switch (pVCpu->iem.s.enmEffOpSize)
6233 {
6234 case IEMMODE_16BIT:
6235 IEM_MC_BEGIN(3, 0);
6236 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6237 IEM_MC_ARG(uint16_t, u16Src, 1);
6238 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6239
6240 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* Register operand: bit offset is taken modulo the operand width. */
6241 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6242 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6243 IEM_MC_REF_EFLAGS(pEFlags);
6244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6245
6246 IEM_MC_ADVANCE_RIP();
6247 IEM_MC_END();
6248 return VINF_SUCCESS;
6249
6250 case IEMMODE_32BIT:
6251 IEM_MC_BEGIN(3, 0);
6252 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6253 IEM_MC_ARG(uint32_t, u32Src, 1);
6254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6255
6256 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6257 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6258 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6259 IEM_MC_REF_EFLAGS(pEFlags);
6260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6261
 /* 32-bit GPR writes zero the upper half of the 64-bit register. */
6262 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6263 IEM_MC_ADVANCE_RIP();
6264 IEM_MC_END();
6265 return VINF_SUCCESS;
6266
6267 case IEMMODE_64BIT:
6268 IEM_MC_BEGIN(3, 0);
6269 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6270 IEM_MC_ARG(uint64_t, u64Src, 1);
6271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6272
6273 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6274 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6275 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6276 IEM_MC_REF_EFLAGS(pEFlags);
6277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6278
6279 IEM_MC_ADVANCE_RIP();
6280 IEM_MC_END();
6281 return VINF_SUCCESS;
6282
6283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6284 }
6285 }
6286 else
6287 {
6288 /* memory destination. */
6289
 /* BT has no locked form (pfnLockedU16 == NULL) and only reads the operand. */
6290 uint32_t fAccess;
6291 if (pImpl->pfnLockedU16)
6292 fAccess = IEM_ACCESS_DATA_RW;
6293 else /* BT */
6294 fAccess = IEM_ACCESS_DATA_R;
6295
6296 /** @todo test negative bit offsets! */
6297 switch (pVCpu->iem.s.enmEffOpSize)
6298 {
6299 case IEMMODE_16BIT:
6300 IEM_MC_BEGIN(3, 2);
6301 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6302 IEM_MC_ARG(uint16_t, u16Src, 1);
6303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6305 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6306
6307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6308 if (pImpl->pfnLockedU16)
6309 IEMOP_HLP_DONE_DECODING();
6310 else
6311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6312 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* Split the signed bit offset: high part adjusts the address in
    2-byte steps (sar 4, shl 1), low 4 bits select the bit. */
6313 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6314 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6315 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6316 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6317 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6318 IEM_MC_FETCH_EFLAGS(EFlags);
6319
6320 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6321 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6322 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6323 else
6324 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6325 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6326
6327 IEM_MC_COMMIT_EFLAGS(EFlags);
6328 IEM_MC_ADVANCE_RIP();
6329 IEM_MC_END();
6330 return VINF_SUCCESS;
6331
6332 case IEMMODE_32BIT:
6333 IEM_MC_BEGIN(3, 2);
6334 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6335 IEM_MC_ARG(uint32_t, u32Src, 1);
6336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6338 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6339
6340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6341 if (pImpl->pfnLockedU16)
6342 IEMOP_HLP_DONE_DECODING();
6343 else
6344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6345 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6346 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6347 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6348 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6349 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6350 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6351 IEM_MC_FETCH_EFLAGS(EFlags);
6352
6353 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6355 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6356 else
6357 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6359
6360 IEM_MC_COMMIT_EFLAGS(EFlags);
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 return VINF_SUCCESS;
6364
6365 case IEMMODE_64BIT:
6366 IEM_MC_BEGIN(3, 2);
6367 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6368 IEM_MC_ARG(uint64_t, u64Src, 1);
6369 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6371 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6372
6373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6374 if (pImpl->pfnLockedU16)
6375 IEMOP_HLP_DONE_DECODING();
6376 else
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6379 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6380 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6381 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6382 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6383 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6384 IEM_MC_FETCH_EFLAGS(EFlags);
6385
6386 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6387 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6389 else
6390 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6391 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6392
6393 IEM_MC_COMMIT_EFLAGS(EFlags);
6394 IEM_MC_ADVANCE_RIP();
6395 IEM_MC_END();
6396 return VINF_SUCCESS;
6397
6398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6399 }
6400 }
6401}
6402
6403
6404/** Opcode 0x0f 0xa3 - bt Ev,Gv; read-only bit test via the common bit-op worker. */
6405FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6406{
6407 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6408 IEMOP_HLP_MIN_386();
6409 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6410}
6411
6412
6413/**
6414 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6414 *
6414 * Double-precision shift with an immediate count.  Register form reads the
6414 * imm8 straight after ModRM; memory form decodes the effective address first
6414 * (passing cbImm=1 so displacement decoding accounts for the trailing imm8)
6414 * and then fetches the count.  AF/OF are architecturally undefined here.
6415 */
6416FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6417{
6418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6419 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6420
6421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6422 {
6423 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425
6426 switch (pVCpu->iem.s.enmEffOpSize)
6427 {
6428 case IEMMODE_16BIT:
6429 IEM_MC_BEGIN(4, 0);
6430 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6431 IEM_MC_ARG(uint16_t, u16Src, 1);
6432 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6433 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6434
6435 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6436 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6437 IEM_MC_REF_EFLAGS(pEFlags);
6438 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6439
6440 IEM_MC_ADVANCE_RIP();
6441 IEM_MC_END();
6442 return VINF_SUCCESS;
6443
6444 case IEMMODE_32BIT:
6445 IEM_MC_BEGIN(4, 0);
6446 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6447 IEM_MC_ARG(uint32_t, u32Src, 1);
6448 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6449 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6450
6451 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6452 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6453 IEM_MC_REF_EFLAGS(pEFlags);
6454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6455
 /* 32-bit GPR writes zero the upper half of the 64-bit register. */
6456 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6457 IEM_MC_ADVANCE_RIP();
6458 IEM_MC_END();
6459 return VINF_SUCCESS;
6460
6461 case IEMMODE_64BIT:
6462 IEM_MC_BEGIN(4, 0);
6463 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6464 IEM_MC_ARG(uint64_t, u64Src, 1);
6465 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6466 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6467
6468 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6469 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6470 IEM_MC_REF_EFLAGS(pEFlags);
6471 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6472
6473 IEM_MC_ADVANCE_RIP();
6474 IEM_MC_END();
6475 return VINF_SUCCESS;
6476
6477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6478 }
6479 }
6480 else
6481 {
6482 switch (pVCpu->iem.s.enmEffOpSize)
6483 {
6484 case IEMMODE_16BIT:
6485 IEM_MC_BEGIN(4, 2);
6486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6487 IEM_MC_ARG(uint16_t, u16Src, 1);
6488 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6489 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6491
 /* cbImm=1: one immediate byte follows the addressing bytes. */
6492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6493 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6494 IEM_MC_ASSIGN(cShiftArg, cShift);
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6497 IEM_MC_FETCH_EFLAGS(EFlags);
6498 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6499 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6500
6501 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6502 IEM_MC_COMMIT_EFLAGS(EFlags);
6503 IEM_MC_ADVANCE_RIP();
6504 IEM_MC_END();
6505 return VINF_SUCCESS;
6506
6507 case IEMMODE_32BIT:
6508 IEM_MC_BEGIN(4, 2);
6509 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6510 IEM_MC_ARG(uint32_t, u32Src, 1);
6511 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6512 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6514
6515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6516 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6517 IEM_MC_ASSIGN(cShiftArg, cShift);
6518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6519 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6520 IEM_MC_FETCH_EFLAGS(EFlags);
6521 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6522 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6523
6524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6525 IEM_MC_COMMIT_EFLAGS(EFlags);
6526 IEM_MC_ADVANCE_RIP();
6527 IEM_MC_END();
6528 return VINF_SUCCESS;
6529
6530 case IEMMODE_64BIT:
6531 IEM_MC_BEGIN(4, 2);
6532 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6533 IEM_MC_ARG(uint64_t, u64Src, 1);
6534 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6535 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6537
6538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6539 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6540 IEM_MC_ASSIGN(cShiftArg, cShift);
6541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6542 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_FETCH_EFLAGS(EFlags);
6544 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6545 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6546
6547 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6548 IEM_MC_COMMIT_EFLAGS(EFlags);
6549 IEM_MC_ADVANCE_RIP();
6550 IEM_MC_END();
6551 return VINF_SUCCESS;
6552
6553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6554 }
6555 }
6556}
6557
6558
6559/**
6560 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6560 *
6560 * Same as the Ib variant but the shift count comes from CL instead of an
6560 * immediate byte (hence cbImm=0 in the effective-address calculation).
6560 * AF/OF are architecturally undefined here.
6561 */
6562FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6563{
6564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6565 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6566
6567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6568 {
6569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6570
6571 switch (pVCpu->iem.s.enmEffOpSize)
6572 {
6573 case IEMMODE_16BIT:
6574 IEM_MC_BEGIN(4, 0);
6575 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6576 IEM_MC_ARG(uint16_t, u16Src, 1);
6577 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6578 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6579
6580 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6581 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 /* Shift count is the low byte of rCX (CL). */
6582 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6583 IEM_MC_REF_EFLAGS(pEFlags);
6584 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6585
6586 IEM_MC_ADVANCE_RIP();
6587 IEM_MC_END();
6588 return VINF_SUCCESS;
6589
6590 case IEMMODE_32BIT:
6591 IEM_MC_BEGIN(4, 0);
6592 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6593 IEM_MC_ARG(uint32_t, u32Src, 1);
6594 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6595 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6596
6597 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6598 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6602
 /* 32-bit GPR writes zero the upper half of the 64-bit register. */
6603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6604 IEM_MC_ADVANCE_RIP();
6605 IEM_MC_END();
6606 return VINF_SUCCESS;
6607
6608 case IEMMODE_64BIT:
6609 IEM_MC_BEGIN(4, 0);
6610 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6611 IEM_MC_ARG(uint64_t, u64Src, 1);
6612 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6613 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6614
6615 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6616 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6617 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6618 IEM_MC_REF_EFLAGS(pEFlags);
6619 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6620
6621 IEM_MC_ADVANCE_RIP();
6622 IEM_MC_END();
6623 return VINF_SUCCESS;
6624
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 }
6628 else
6629 {
6630 switch (pVCpu->iem.s.enmEffOpSize)
6631 {
6632 case IEMMODE_16BIT:
6633 IEM_MC_BEGIN(4, 2);
6634 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6635 IEM_MC_ARG(uint16_t, u16Src, 1);
6636 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6637 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6639
6640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6642 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6643 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6644 IEM_MC_FETCH_EFLAGS(EFlags);
6645 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6646 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6647
6648 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6649 IEM_MC_COMMIT_EFLAGS(EFlags);
6650 IEM_MC_ADVANCE_RIP();
6651 IEM_MC_END();
6652 return VINF_SUCCESS;
6653
6654 case IEMMODE_32BIT:
6655 IEM_MC_BEGIN(4, 2);
6656 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6657 IEM_MC_ARG(uint32_t, u32Src, 1);
6658 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6659 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6661
6662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6664 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6665 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6666 IEM_MC_FETCH_EFLAGS(EFlags);
6667 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6668 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6669
6670 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6671 IEM_MC_COMMIT_EFLAGS(EFlags);
6672 IEM_MC_ADVANCE_RIP();
6673 IEM_MC_END();
6674 return VINF_SUCCESS;
6675
6676 case IEMMODE_64BIT:
6677 IEM_MC_BEGIN(4, 2);
6678 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6679 IEM_MC_ARG(uint64_t, u64Src, 1);
6680 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6681 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6683
6684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6686 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6687 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6688 IEM_MC_FETCH_EFLAGS(EFlags);
6689 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6690 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6691
6692 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6693 IEM_MC_COMMIT_EFLAGS(EFlags);
6694 IEM_MC_ADVANCE_RIP();
6695 IEM_MC_END();
6696 return VINF_SUCCESS;
6697
6698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6699 }
6700 }
6701}
6702
6703
6704
6705/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib; immediate-count variant via common worker. */
6706FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6707{
6708 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6709 IEMOP_HLP_MIN_386();
6710 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6711}
6712
6713
6714/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL; CL-count variant via common worker. */
6715FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6716{
6717 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6718 IEMOP_HLP_MIN_386();
6719 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6720}
6721
6722
6723/** Opcode 0x0f 0xa8 - push gs (386+); defers to the common sreg-push helper. */
6724FNIEMOP_DEF(iemOp_push_gs)
6725{
6726 IEMOP_MNEMONIC(push_gs, "push gs");
6727 IEMOP_HLP_MIN_386();
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6730}
6731
6732
6733/** Opcode 0x0f 0xa9 - pop gs (386+); deferred to a C implementation since it can fault/reload the sreg. */
6734FNIEMOP_DEF(iemOp_pop_gs)
6735{
6736 IEMOP_MNEMONIC(pop_gs, "pop gs");
6737 IEMOP_HLP_MIN_386();
6738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6739 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6740}
6741
6742
6743/** Opcode 0x0f 0xaa - rsm (resume from system management mode); deferred to the C implementation. */
6744FNIEMOP_DEF(iemOp_rsm)
6745{
6746 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6747 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6749 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6750}
6751
6752
6753
6754/** Opcode 0x0f 0xab - bts Ev,Gv; bit test-and-set via the common bit-op worker (lockable). */
6755FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6756{
6757 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6758 IEMOP_HLP_MIN_386();
6759 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6760}
6761
6762
6763/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib; immediate-count variant via common worker. */
6764FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6765{
6766 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6767 IEMOP_HLP_MIN_386();
6768 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6769}
6770
6771
6772/** Opcode 0x0f 0xad - shrd Ev,Gv,CL; CL-count variant via common worker. */
6773FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6774{
6775 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6776 IEMOP_HLP_MIN_386();
6777 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6778}
6779
6780
6781/** Opcode 0x0f 0xae mem/0 - fxsave m512: save x87/MMX/SSE state; #UD without FXSR support. */
6782FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6783{
6784 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6785 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6786 return IEMOP_RAISE_INVALID_OPCODE();
6787
6788 IEM_MC_BEGIN(3, 1);
6789 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6790 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6791 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Saving only reads the guest FPU state. */
6794 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6795 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6796 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6797 IEM_MC_END();
6798 return VINF_SUCCESS;
6799}
6800
6801
6802/** Opcode 0x0f 0xae mem/1 - fxrstor m512: restore x87/MMX/SSE state; #UD without FXSR support. */
6803FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6804{
6805 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6806 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6807 return IEMOP_RAISE_INVALID_OPCODE();
6808
6809 IEM_MC_BEGIN(3, 1);
6810 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6812 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Restoring overwrites the guest FPU state, hence FOR_CHANGE. */
6815 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6816 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6817 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6818 IEM_MC_END();
6819 return VINF_SUCCESS;
6820}
6821
6822
6823/**
6824 * @opmaps grp15
6825 * @opcode !11/2
6826 * @oppfx none
6827 * @opcpuid sse
6828 * @opgroup og_sse_mxcsrsm
6829 * @opxcpttype 5
6830 * @optest op1=0 -> mxcsr=0
6831 * @optest op1=0x2083 -> mxcsr=0x2083
6832 * @optest op1=0xfffffffe -> value.xcpt=0xd
6833 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6834 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6835 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6836 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6837 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6838 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6839 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6840 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6841 */
6842FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6843{
6844 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6845 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6846 return IEMOP_RAISE_INVALID_OPCODE();
6847
6848 IEM_MC_BEGIN(2, 0);
6849 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6850 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* NOTE(review): FOR_READ here even though ldmxcsr writes MXCSR - presumably
    the CIMPL takes care of the modification; confirm against iemCImpl_ldmxcsr. */
6853 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6854 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6855 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6856 IEM_MC_END();
6857 return VINF_SUCCESS;
6858}
6859
6860
6861/**
6862 * @opmaps grp15
6863 * @opcode !11/3
6864 * @oppfx none
6865 * @opcpuid sse
6866 * @opgroup og_sse_mxcsrsm
6867 * @opxcpttype 5
6868 * @optest mxcsr=0 -> op1=0
6869 * @optest mxcsr=0x2083 -> op1=0x2083
6870 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6871 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6872 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6873 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6874 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6875 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6876 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6877 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6878 */
6879FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6880{
6881 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6882 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6883 return IEMOP_RAISE_INVALID_OPCODE();
6884
6885 IEM_MC_BEGIN(2, 0);
6886 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6887 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Storing MXCSR only reads the guest SSE state. */
6890 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6891 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6892 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6893 IEM_MC_END();
6894 return VINF_SUCCESS;
6895}
6896
6897
6898/**
6899 * @opmaps grp15
6900 * @opcode !11/4
6901 * @oppfx none
6902 * @opcpuid xsave
6903 * @opgroup og_system
6904 * @opxcpttype none
6904 *
6904 * xsave mem: save processor extended states per XCR0/EDX:EAX mask; #UD if
6904 * the guest lacks XSAVE support.
6905 */
6906FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6907{
6908 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6909 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6910 return IEMOP_RAISE_INVALID_OPCODE();
6911
6912 IEM_MC_BEGIN(3, 0);
6913 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6914 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6915 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6918 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6919 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6920 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6921 IEM_MC_END();
6922 return VINF_SUCCESS;
6923}
6924
6925
6926/**
6927 * @opmaps grp15
6928 * @opcode !11/5
6929 * @oppfx none
6930 * @opcpuid xsave
6931 * @opgroup og_system
6932 * @opxcpttype none
6932 *
6932 * xrstor mem: restore processor extended states; #UD if the guest lacks
6932 * XSAVE support.
6933 */
6934FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6935{
6936 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6937 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6938 return IEMOP_RAISE_INVALID_OPCODE();
6939
6940 IEM_MC_BEGIN(3, 0);
6941 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6942 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6943 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* NOTE(review): FOR_READ although xrstor modifies FPU state (fxrstor above
    uses FOR_CHANGE) - presumably the CIMPL handles the modification; confirm. */
6946 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6947 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6948 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6949 IEM_MC_END();
6950 return VINF_SUCCESS;
6951}
6952
6953/** Opcode 0x0f 0xae mem/6 - xsaveopt; not implemented, stubbed to raise invalid opcode. */
6954FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6955
6956/**
6957 * @opmaps grp15
6958 * @opcode !11/7
6959 * @oppfx none
6960 * @opcpuid clfsh
6961 * @opgroup og_cachectl
6962 * @optest op1=1 ->
6962 *
6962 * clflush Mb: flush the cache line containing the byte; decodes as an
6962 * invalid/alternative encoding when the guest lacks CLFLUSH.
6963 */
6964FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6965{
6966 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6967 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6968 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6969
6970 IEM_MC_BEGIN(2, 0);
6971 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6972 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6975 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
 /* Shares a C implementation with clflushopt (same observable effect here). */
6976 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6977 IEM_MC_END();
6978 return VINF_SUCCESS;
6979}
6980
6981/**
6982 * @opmaps grp15
6983 * @opcode !11/7
6984 * @oppfx 0x66
6985 * @opcpuid clflushopt
6986 * @opgroup og_cachectl
6987 * @optest op1=1 ->
6987 *
6987 * clflushopt Mb: optimized cache-line flush (0x66-prefixed form); decodes
6987 * as an invalid/alternative encoding when the guest lacks CLFLUSHOPT.
6988 */
6989FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6990{
6991 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6992 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6993 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6994
6995 IEM_MC_BEGIN(2, 0);
6996 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6997 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7000 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
 /* Shares a C implementation with clflush (same observable effect here). */
7001 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7002 IEM_MC_END();
7003 return VINF_SUCCESS;
7004}
7005
7006
7007/** Opcode 0x0f 0xae 11b/5 - lfence; #UD without guest SSE2.  Uses a real host
7007 * lfence when the host has SSE2, otherwise an alternative memory fence. */
7008FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7009{
7010 RT_NOREF_PV(bRm);
7011 IEMOP_MNEMONIC(lfence, "lfence");
7012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7013 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7014 return IEMOP_RAISE_INVALID_OPCODE();
7015
7016 IEM_MC_BEGIN(0, 0);
7017 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7018 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7019 else
7020 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7021 IEM_MC_ADVANCE_RIP();
7022 IEM_MC_END();
7023 return VINF_SUCCESS;
7024}
7025
7026
7027/** Opcode 0x0f 0xae 11b/6 - mfence; #UD without guest SSE2.  Uses a real host
7027 * mfence when the host has SSE2, otherwise an alternative memory fence. */
7028FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7029{
7030 RT_NOREF_PV(bRm);
7031 IEMOP_MNEMONIC(mfence, "mfence");
7032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7033 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7034 return IEMOP_RAISE_INVALID_OPCODE();
7035
7036 IEM_MC_BEGIN(0, 0);
7037 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7038 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7039 else
7040 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7041 IEM_MC_ADVANCE_RIP();
7042 IEM_MC_END();
7043 return VINF_SUCCESS;
7044}
7045
7046
7047/** Opcode 0x0f 0xae 11b/7 - sfence; #UD without guest SSE2.  Uses a real host
7047 * sfence when the host has SSE2, otherwise an alternative memory fence.
7047 * NOTE(review): sfence is architecturally an SSE (not SSE2) instruction -
7047 * confirm whether gating on fSse2 is intentional here. */
7048FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7049{
7050 RT_NOREF_PV(bRm);
7051 IEMOP_MNEMONIC(sfence, "sfence");
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7054 return IEMOP_RAISE_INVALID_OPCODE();
7055
7056 IEM_MC_BEGIN(0, 0);
7057 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7058 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7059 else
7060 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7061 IEM_MC_ADVANCE_RIP();
7062 IEM_MC_END();
7063 return VINF_SUCCESS;
7064}
7065
7066
7067/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase Ry: read the FS base address into a
7067 * GPR (64-bit or zero-extended 32-bit, per effective operand size). */
7068FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7069{
7070 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7073 {
7074 IEM_MC_BEGIN(1, 0);
 /* Raises the appropriate exception when FSGSBASE is unavailable/disabled. */
7075 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7076 IEM_MC_ARG(uint64_t, u64Dst, 0);
7077 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7078 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7079 IEM_MC_ADVANCE_RIP();
7080 IEM_MC_END();
7081 }
7082 else
7083 {
7084 IEM_MC_BEGIN(1, 0);
7085 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7086 IEM_MC_ARG(uint32_t, u32Dst, 0);
7087 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7088 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 }
7092 return VINF_SUCCESS;
7093}
7094
7095
/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same structure as rdfsbase, reading the GS base instead. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7123
7124
/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* A 64-bit source must be a canonical address, else #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* 32-bit source: stored via the U64 variant, i.e. zero-extended; a
           zero-extended 32-bit value is always canonical, so no #GP check. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7153
7154
/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same structure as wrfsbase, writing the GS base instead. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* A 64-bit source must be a canonical address, else #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* Zero-extended 32-bit value is always canonical; no #GP check needed. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7183
7184
/**
 * Group 15 jump table for register variant.
 *
 * Indexed by modrm.reg * 4 + mandatory-prefix index (none, 066h, 0f3h, 0f2h);
 * see iemOp_Grp15 for the lookup.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7200
7201
/**
 * Group 15 jump table for memory variant.
 *
 * Same indexing as g_apfnGroup15RegReg: modrm.reg * 4 + mandatory-prefix
 * index (none, 066h, 0f3h, 0f2h); see iemOp_Grp15.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7217
7218
7219/** Opcode 0x0f 0xae. */
7220FNIEMOP_DEF(iemOp_Grp15)
7221{
7222 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7225 /* register, register */
7226 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7227 + pVCpu->iem.s.idxPrefix], bRm);
7228 /* memory, register */
7229 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7230 + pVCpu->iem.s.idxPrefix], bRm);
7231}
7232
7233
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF, ZF, AF and PF are marked undefined for the two-operand IMUL form. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Defer to the common Gv,Ev binary-operator decoder with the IMUL worker. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
7242
7243
/** Opcode 0x0f 0xb0.
 *
 * CMPXCHG Eb,Gb: compares AL with the destination; the worker updates the
 * destination and/or AL plus EFLAGS (see iemAImpl_cmpxchg_u8). A LOCK prefix
 * selects the locked worker variant.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: all three operands by reference into guest state. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write and give the worker a pointer
           to a local AL copy, which is committed back after unmapping. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back the (possibly updated) AL copy */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7302
/** Opcode 0x0f 0xb1.
 *
 * CMPXCHG Ev,Gv: compares rAX with the destination; the worker updates the
 * destination and/or rAX plus EFLAGS, for 16/32/64-bit operand sizes. A LOCK
 * prefix selects the locked worker variant. On 32-bit x86 hosts the 64-bit
 * source is passed by reference instead of by value (RT_ARCH_X86 paths) —
 * presumably a calling-convention limitation of the assembly workers.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest registers. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the high dword in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it read-write; rAX goes via a local copy
           that is written back after the mapping is committed. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7496
7497
/**
 * Common worker for the far-pointer load instructions (LSS/LFS/LGS — opcodes
 * 0x0f 0xb2/0xb4/0xb5): fetches the offset and the 16-bit selector that
 * follows it from memory, then defers to iemCImpl_load_SReg_Greg to load the
 * segment register and the general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    /* The selector sits after the offset, so its displacement equals the
       offset width: 2, 4 or 8 bytes depending on the operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7559
7560
7561/** Opcode 0x0f 0xb2. */
7562FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7563{
7564 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7565 IEMOP_HLP_MIN_386();
7566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7568 return IEMOP_RAISE_INVALID_OPCODE();
7569 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7570}
7571
7572
/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Defer to the common BT-family Ev,Gv decoder with the BTR worker. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
7580
7581
7582/** Opcode 0x0f 0xb4. */
7583FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7584{
7585 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7586 IEMOP_HLP_MIN_386();
7587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7589 return IEMOP_RAISE_INVALID_OPCODE();
7590 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7591}
7592
7593
7594/** Opcode 0x0f 0xb5. */
7595FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7596{
7597 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7598 IEMOP_HLP_MIN_386();
7599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7601 return IEMOP_RAISE_INVALID_OPCODE();
7602 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7603}
7604
7605
/** Opcode 0x0f 0xb6.
 *
 * MOVZX Gv,Eb: zero-extends a byte source (register or memory) into a 16, 32
 * or 64-bit destination register, per the effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7699
7700
/** Opcode 0x0f 0xb7.
 *
 * MOVZX Gv,Ew: zero-extends a word source (register or memory) into a 32 or
 * 64-bit destination register. Only the 64-bit operand size is distinguished;
 * 16-bit operand size falls into the 32-bit path.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
7769
7770
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); UD stub, so it
 *  presumably raises \#UD on x86 — confirm in the FNIEMOP_UD_STUB macro. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev (not implemented yet; stubbed). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7775
7776
/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     * Either way the instruction (UD1) always raises #UD.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
7792
7793
/** Opcode 0x0f 0xba.
 *
 * Group 8: BT/BTS/BTR/BTC Ev,Ib selected by modrm.reg (/4../7); /0../3 are
 * invalid but still consume the full modr/m and imm8 bytes. The imm8 bit
 * offset is masked down to the operand width (0x0f/0x1f/0x3f).
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear the high dword in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked variant (it only reads), so map read-only then;
           the write variants map read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte still follows the modr/m bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7965
7966
/** Opcode 0x0f 0xbb - BTC Ev,Gv: complement the bit in Ev selected by Gv. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    /* Shares the common bit-op Ev,Gv decoder; only the worker table differs from bt/bts/btr. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
7974
7975
7976/** Opcode 0x0f 0xbc. */
7977FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7978{
7979 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7980 IEMOP_HLP_MIN_386();
7981 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7982 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7983}
7984
7985
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7988
7989
/** Opcode 0x0f 0xbd - BSR Gv,Ev: scan for the highest set bit in Ev. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* All flags except ZF are architecturally undefined after bsr; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
7998
7999
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev (decode stub, not implemented yet). */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
8002
8003
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb: sign-extend a byte into a 16/32/64-bit register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Register source: fetch the byte register sign-extended to the
           effective operand size and store it in the destination register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         * Note: effective address is calculated before the done-decoding check.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8097
8098
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew: sign-extend a word into a 32/64-bit register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     * Only two result sizes exist: 32-bit (default/066h treated the same,
     * see the todo above) and 64-bit with REX.W.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
8167
8168
/** Opcode 0x0f 0xc0 - XADD Eb,Gb: exchange and add, byte form (486+). */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the worker updates both registers in place. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  Map the destination read-write, run the
         * worker on a stack copy of the source register, then write the old
         * destination value back into the source register.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else /* LOCK prefix selects the atomic worker. */
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
8227
8228
/** Opcode 0x0f 0xc1 - XADD Ev,Gv: exchange and add, word/dword/qword form (486+). */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes implicitly zero the upper halves of
                   both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  Map the destination read-write, run the
         * worker on a stack copy of the source register, then write the old
         * destination value back into the source register.  The LOCK prefix
         * selects the atomic worker in each size variant.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8381
8382
/* SSE/SSE2 compare-with-immediate opcodes 0xc2 - decode stubs only. */
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib (stub). */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib (stub). */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib (stub). */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib (stub). */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8391
8392
/** Opcode 0x0f 0xc3 - MOVNTI My,Gy: non-temporal store of a 32/64-bit register
 *  to memory (emulated as a plain store here; requires SSE2). */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The SSE2 feature check is done after operand decoding. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
8446/* Opcode 0x66 0x0f 0xc3 - invalid */
8447/* Opcode 0xf3 0x0f 0xc3 - invalid */
8448/* Opcode 0xf2 0x0f 0xc3 - invalid */
8449
/* MMX/SSE insert/extract/shuffle opcodes 0xc4..0xc6 - decode stubs only. */
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib (stub). */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib (stub). */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/*  Opcode 0xf3 0x0f 0xc4 - invalid */
/*  Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib (stub). */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib (stub). */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/*  Opcode 0xf3 0x0f 0xc5 - invalid */
/*  Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib (stub). */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib (stub). */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/*  Opcode 0xf3 0x0f 0xc6 - invalid */
/*  Opcode 0xf2 0x0f 0xc6 - invalid */
8470
8471
/** Opcode 0x0f 0xc7 !11/1 - CMPXCHG8B Mq: compare EDX:EAX with m64; if equal,
 *  store ECX:EBX to m64, else load m64 into EDX:EAX.  ZF reports the outcome. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Pack EDX:EAX and ECX:EBX into 64-bit unions for the worker (32-bit fetches). */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else /* LOCK prefix selects the atomic worker. */
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8516
8517
/** Opcode REX.W 0x0f 0xc7 !11/1 - CMPXCHG16B Mdq: 128-bit compare-exchange of
 *  RDX:RAX against m128, storing RCX:RBX on match.  Requires the CX16 CPUID
 *  feature; #UD otherwise.  The memory operand must be 16-byte aligned (#GP). */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Pack RDX:RAX and RCX:RBX into 128-bit unions for the worker. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        /* Prefer the native cmpxchg16b worker when the host CPU has CX16. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
8591
8592FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8593{
8594 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8595 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8596 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8597}
8598
/** Opcode 0x0f 0xc7 11/6 - rdrand Rv (stub, currently raises #UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8601
/** Opcode 0x0f 0xc7 !11/6 - VMPTRLD Mq: load the current-VMCS pointer from memory.
 *  Only available with nested VMX support; otherwise a #UD stub. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Reject size/repz/repnz prefixes: those select other group-9 entries. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
8622
/** Opcode 0x66 0x0f 0xc7 !11/6 - VMCLEAR Mq: clear the VMCS referenced by the
 *  memory operand.  Only available with nested VMX support; otherwise #UD stub.
 *  NOTE(review): uses plain IEMOP_HLP_DONE_DECODING() while vmptrld/vmptrst
 *  reject size/repz/repnz prefixes - presumably fine since the 066h prefix
 *  already selected this table entry; confirm against the decoder tables. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
8643
/** Opcode 0xf3 0x0f 0xc7 !11/6 - VMXON Mq: enter VMX root operation using the
 *  VMXON region at the memory operand.  Only with nested VMX; otherwise #UD.
 *  Note: no IEMOP_HLP_IN_VMX_OPERATION check here - vmxon is what enters it. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
8663
/** Opcode [0xf3] 0x0f 0xc7 !11/7 - VMPTRST Mq: store the current-VMCS pointer
 *  to memory.  Only available with nested VMX support; otherwise a #UD stub. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* Reject size/repz/repnz prefixes: those select other group-9 entries. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
8684
/** Opcode 0x0f 0xc7 11/7 - rdseed Rv (stub, currently raises #UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8687
8688
8689/**
8690 * Group 9 jump table for register variant.
8691 */
8692IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8693{ /* pfx: none, 066h, 0f3h, 0f2h */
8694 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8695 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8696 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8697 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8698 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8699 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8700 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8701 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8702};
8703AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8704
8705
8706/**
8707 * Group 9 jump table for memory variant.
8708 */
8709IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8710{ /* pfx: none, 066h, 0f3h, 0f2h */
8711 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8712 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8713 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8714 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8715 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8716 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8717 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8718 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8719};
8720AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8721
8722
8723/** Opcode 0x0f 0xc7. */
8724FNIEMOP_DEF(iemOp_Grp9)
8725{
8726 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8727 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8728 /* register, register */
8729 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8730 + pVCpu->iem.s.idxPrefix], bRm);
8731 /* memory, register */
8732 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8733 + pVCpu->iem.s.idxPrefix], bRm);
8734}
8735
8736
8737/**
8738 * Common 'bswap register' helper.
8739 */
8740FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8741{
8742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8743 switch (pVCpu->iem.s.enmEffOpSize)
8744 {
8745 case IEMMODE_16BIT:
8746 IEM_MC_BEGIN(1, 0);
8747 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8748 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8749 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8750 IEM_MC_ADVANCE_RIP();
8751 IEM_MC_END();
8752 return VINF_SUCCESS;
8753
8754 case IEMMODE_32BIT:
8755 IEM_MC_BEGIN(1, 0);
8756 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8757 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8758 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8759 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8760 IEM_MC_ADVANCE_RIP();
8761 IEM_MC_END();
8762 return VINF_SUCCESS;
8763
8764 case IEMMODE_64BIT:
8765 IEM_MC_BEGIN(1, 0);
8766 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8767 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8768 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8769 IEM_MC_ADVANCE_RIP();
8770 IEM_MC_END();
8771 return VINF_SUCCESS;
8772
8773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8774 }
8775}
8776
8777
/** Opcode 0x0f 0xc8 - BSWAP rAX/r8 (486+). */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8788
8789
/** Opcode 0x0f 0xc9 - BSWAP rCX/r9 (486+). */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8797
8798
8799/** Opcode 0x0f 0xca. */
8800FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8801{
8802 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
8803 IEMOP_HLP_MIN_486();
8804 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8805}
8806
8807
8808/** Opcode 0x0f 0xcb. */
8809FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8810{
8811 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
8812 IEMOP_HLP_MIN_486();
8813 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8814}
8815
8816
/** Opcode 0x0f 0xcc - BSWAP rSP/r12 (486+). */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8824
8825
/** Opcode 0x0f 0xcd - BSWAP rBP/r13 (486+). */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8833
8834
/** Opcode 0x0f 0xce - BSWAP rSI/r14 (486+). */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8842
8843
/** Opcode 0x0f 0xcf - BSWAP rDI/r15 (486+). */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8851
8852
/* The 0x0f 0xd0 thru 0x0f 0xd5 instruction forms below are not implemented
   yet; FNIEMOP_STUB declares placeholder decoders for them. */

/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
FNIEMOP_STUB(iemOp_psrlw_Vx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
FNIEMOP_STUB(iemOp_paddq_Vx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
8896
/**
 * @opcode      0xd6
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    /* MOVQ Wq,Vq: store the low quadword of an XMM register either to memory
       or to another XMM register (zero extending the latter to 128 bits). */
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* the destination XMM register is written */

        /* Low qword of ModRM.reg -> ModRM.rm, zeroing the destination's high qword. */
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be decoded before we commit to having decoded the instruction. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the source XMM register here */

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8950
8951
/**
 * @opcode      0xd6
 * @opcodesub   11 mr/reg
 * @oppfx       f3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    /* MOVQ2DQ: copy an MMX register into the low quadword of an XMM register,
       zeroing the high quadword.  Only the register form is defined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* modifies FPU/MMX state too (see FTW change in @optest) */

        IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* switches the FPU into MMX mode */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf30fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f3
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
8998
8999
/**
 * @opcode      0xd6
 * @opcodesub   11 mr/reg
 * @oppfx       f2
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest      op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest      op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest      op1=-42 op2=0xfedcba9876543210
 *          ->  op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    /* MOVDQ2Q: copy the low quadword of an XMM register into an MMX register.
       Only the register form is defined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* modifies FPU/MMX state too (see FTW change in @optest) */

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* switches the FPU into MMX mode */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf20fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f2
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
9050
9051/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9052FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9053{
9054 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
9055 /** @todo testcase: Check that the instruction implicitly clears the high
9056 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9057 * and opcode modifications are made to work with the whole width (not
9058 * just 128). */
9059 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Nq");
9060 /* Docs says register only. */
9061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9063 {
9064 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9065 IEM_MC_BEGIN(2, 0);
9066 IEM_MC_ARG(uint64_t *, pDst, 0);
9067 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9068 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9069 IEM_MC_PREPARE_FPU_USAGE();
9070 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9071 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9072 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9073 IEM_MC_ADVANCE_RIP();
9074 IEM_MC_END();
9075 return VINF_SUCCESS;
9076 }
9077 return IEMOP_RAISE_INVALID_OPCODE();
9078}
9079
9080/** Opcode 0x66 0x0f 0xd7 - */
9081FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9082{
9083 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
9084 /** @todo testcase: Check that the instruction implicitly clears the high
9085 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9086 * and opcode modifications are made to work with the whole width (not
9087 * just 128). */
9088 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
9089 /* Docs says register only. */
9090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9092 {
9093 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9094 IEM_MC_BEGIN(2, 0);
9095 IEM_MC_ARG(uint64_t *, pDst, 0);
9096 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9097 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9098 IEM_MC_PREPARE_SSE_USAGE();
9099 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9100 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9101 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9102 IEM_MC_ADVANCE_RIP();
9103 IEM_MC_END();
9104 return VINF_SUCCESS;
9105 }
9106 return IEMOP_RAISE_INVALID_OPCODE();
9107}
9108
9109/* Opcode 0xf3 0x0f 0xd7 - invalid */
9110/* Opcode 0xf2 0x0f 0xd7 - invalid */
9111
9112
/* The 0x0f 0xd8 thru 0x0f 0xe6 MMX/SSE integer instruction forms below are
   not implemented yet; FNIEMOP_STUB declares placeholder decoders for them. */

/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9218
9219
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    /* MOVNTQ: store an MMX register to memory.  Only the memory form is
       defined; the register form raises \#UD (see doxygen block below). */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* modifies FPU/MMX state too (see FTW change in @optest) */

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* switches the FPU into MMX mode */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
9266
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    /* MOVNTDQ: store an XMM register to memory (16-byte aligned).  Only the
       memory form is defined; the register form raises \#UD (see below). */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading the source XMM register */

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checked store */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
9313
9314/* Opcode 0xf3 0x0f 0xe7 - invalid */
9315/* Opcode 0xf2 0x0f 0xe7 - invalid */
9316
9317
/* The 0x0f 0xe8 thru 0x0f 0xee MMX/SSE integer instruction forms below are
   not implemented yet; FNIEMOP_STUB declares placeholder decoders for them. */

/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, W */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
9366
9367
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* MMX PXOR: dispatch to the common MMX full,full->full worker with the pxor implementation. */
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}
9374
/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    /* SSE2 PXOR: dispatch to the common SSE2 full,full->full worker with the pxor implementation. */
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
9381
9382/* Opcode 0xf3 0x0f 0xef - invalid */
9383/* Opcode 0xf2 0x0f 0xef - invalid */
9384
/* The 0x0f 0xf0 thru 0x0f 0xfe MMX/SSE instruction forms below are not
   implemented yet; FNIEMOP_STUB declares placeholder decoders for them. */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
9473
9474
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    /* UD0 - guaranteed invalid opcode.  On Intel CPUs a ModR/M byte (and any
       effective-address bytes) is consumed before \#UD is raised; on other
       vendors we raise \#UD without fetching more bytes. */
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* Decode (and discard) the effective address so the instruction
               length is accounted for; propagate any decoding failure. */
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
9495
9496
9497
9498/**
9499 * Two byte opcode map, first byte 0x0f.
9500 *
9501 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9502 * check if it needs updating as well when making changes.
9503 */
9504IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9505{
9506 /* no prefix, 066h prefix f3h prefix, f2h prefix */
9507 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9508 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9509 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9510 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9511 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9512 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9513 /* 0x06 */ IEMOP_X4(iemOp_clts),
9514 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9515 /* 0x08 */ IEMOP_X4(iemOp_invd),
9516 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9517 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9518 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9519 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9520 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9521 /* 0x0e */ IEMOP_X4(iemOp_femms),
9522 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9523
9524 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9525 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9526 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9527 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9528 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9529 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9530 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9531 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9532 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9533 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9534 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9535 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9536 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9537 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9538 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9539 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9540
9541 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9542 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9543 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9544 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9545 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9546 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9547 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9548 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9549 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9550 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9551 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9552 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9553 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9554 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9555 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9556 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9557
9558 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9559 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9560 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9561 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9562 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9563 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9564 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9565 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9566 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9567 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9568 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9569 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9570 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9571 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9572 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9573 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9574
9575 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9576 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9577 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9578 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9579 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9580 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9581 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9582 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9583 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9584 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9585 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9586 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9587 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9588 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9589 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9590 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9591
9592 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9593 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9594 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9595 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9596 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9597 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9598 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9599 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9600 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9601 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9602 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9603 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9604 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9605 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9606 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9607 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9608
9609 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9610 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9611 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9612 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9613 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9614 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9615 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9619 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9620 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9621 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9622 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9623 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9624 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9625
9626 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9627 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9628 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9629 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9630 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9631 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9632 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9633 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9634
9635 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9636 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9637 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9638 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9639 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9640 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9641 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9642 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9643
9644 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9645 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9646 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9647 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9648 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9649 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9650 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9651 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9652 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9653 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9654 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9655 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9656 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9657 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9658 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9659 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9660
9661 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9662 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9663 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9664 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9665 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9666 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9667 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9668 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9669 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9670 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9671 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9672 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9673 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9674 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9675 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9676 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9677
9678 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9679 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9680 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9681 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9682 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9683 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9684 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9685 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9686 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9687 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9688 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9689 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9690 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9691 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9692 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9693 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9694
9695 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9696 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9697 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9698 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9699 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9700 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9701 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9702 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9703 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9704 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9705 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9706 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9707 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9708 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9709 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9710 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9711
9712 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9713 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9714 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9715 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9716 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9717 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9718 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9719 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9720 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9721 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9722 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9723 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9724 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9725 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9726 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9727 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9728
9729 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9730 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9731 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9732 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9733 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9734 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9735 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9736 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9737 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9738 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9739 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9740 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9741 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9742 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9743 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9744 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9745
9746 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9747 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9748 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9749 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9750 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9751 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9752 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9753 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9754 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9755 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9756 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9757 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9758 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9759 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9760 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9761 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9762
9763 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9764 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9765 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9766 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9767 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9768 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9769 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9770 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9771 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9772 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9773 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9774 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9775 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9776 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9777 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9778 /* 0xff */ IEMOP_X4(iemOp_ud0),
9779};
/* The two-byte (0x0f) opcode map must hold exactly 1024 entries:
   256 opcodes (0x00..0xff) x 4 mandatory-prefix variants per opcode
   (no prefix, 0x66, 0xf3, 0xf2 - see e.g. the bsf/bsf/tzcnt/bsf row
   for 0xbc, or cmpps/cmppd/cmpss/cmpsd for 0xc2). IEMOP_X4 expands
   one handler into all four columns for prefix-insensitive opcodes. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
9781
9782/** @} */
9783
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette