VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@73937

Last change on this file since 73937 was 73937, checked in by vboxsync, 6 years ago:

VMM/IEM, HM: Nested VMX: bugref:9180 Implemented VMWRITE instruction.
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 73937 2018-08-29 06:12:35Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
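
/* A worked ModRM example (editorial sketch, not part of the original source):
   the register/memory split above keys off the two 'mod' bits of the ModRM
   byte.  For bRm = 0xd8 = 11 011 000b we get mod = 3 (register operand),
   reg = 3 (the /3 sub-opcode) and rm = 0 (the AX/EAX/RAX register):

       mod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;   // -> 3
       reg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;  // -> 3
       rm  =  bRm & X86_MODRM_RM_MASK;                            // -> 0
*/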


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Common worker for the VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5) instructions. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
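
/* Dispatch sketch (illustrative, not part of the original source): for the
   encoding 0f 00 d8, bRm = 0xd8 has reg = 3, so g_apfnGroup6[3] sends us to
   iemOp_Grp6_ltr, which then sees mod = 3 and rm = 0 and takes the register
   form, i.e. 'ltr ax'. */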


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1 (/0 with mod=3, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 0xc2 (/0 with mod=3, rm=2). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3 (/0 with mod=3, rm=3). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4 (/0 with mod=3, rm=4). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that mwait is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
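
/* Operand note (editorial sketch, not part of the original source): LGDT/LIDT
   read a pseudo-descriptor from memory, a 16-bit limit followed by a 32-bit
   base (the top base byte being ignored with a 16-bit operand size), or a
   16-bit limit followed by a 64-bit base in long mode - which is why the
   effective operand size is passed on to iemCImpl_lgdt above. */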


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
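
/* Dispatch sketch (illustrative, not part of the original source): with
   mod = 3 the reg and rm fields select special encodings rather than a
   sub-opcode with an operand.  E.g. 0f 01 d0 gives mod = 3, reg = 2, rm = 0,
   which the switch above routes to iemOp_Grp7_xgetbv; 0f 01 d8 gives reg = 3,
   rm = 0, i.e. the AMD vmrun encoding. */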

/** Common worker for the LAR (0x0f 0x02) and LSL (0x0f 0x03) instructions. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
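
/* Semantics note (editorial, not part of the original source): LAR loads the
   access-rights bytes and LSL the segment limit of the selected descriptor
   into the destination register, setting ZF on success and clearing it when
   the selector fails the checks - hence both funnel into one worker that
   differs only by the fIsLar flag.  The source is always a 16-bit selector
   read, which is what the @todo testcase above wants verified for memory
   operands. */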



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
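
/* REX sketch (editorial, assuming the usual IEM state layout): uRexReg and
   uRexB appear to hold the REX.R/REX.B bits already shifted into bit 3 of the
   register index, so OR-ing them onto the 3-bit ModRM fields yields indexes
   0-15.  E.g. with REX.R set, reg = 1 selects xmm9 as the destination above;
   without a REX prefix both are zero and the plain 3-bit index is used. */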


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
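
/* Semantics note (editorial, not part of the original source): the two movss
   branches differ on purpose.  Register-to-register only replaces the low
   dword and leaves bits 127:32 of the destination untouched
   (IEM_MC_STORE_XREG_U32), while the load form zero-extends the dword through
   bit 127 (IEM_MC_STORE_XREG_U32_ZX_U128), matching the SSE definition of
   MOVSS. */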


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.  Per the SDM only the low qword is replaced;
         * unlike the load form below, bits 127:64 stay unchanged.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
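
/* Worked lanes (editorial note, matching the @optest above): movsldup
   duplicates the even (low) dword of each qword.  With source dwords
   [d0..d3] = 00000001, eeeeeeee, 00000002, dddddddd the result is
   [d0, d0, d2, d2], i.e. 0x00000002000000020000000100000001. */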


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
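
/* Worked lanes (editorial note, matching the @optest above): movddup copies
   the low qword into both halves, so src = 0xddddddddeeeeeeee2222222211111111
   yields 0x22222222111111112222222211111111.  The register form therefore
   only needs the low 64 bits of the source, which is why uSrc is fetched as a
   uint64_t rather than as the full 128-bit register. */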


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
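
/* Semantics note (editorial, not part of the original source): with mod = 3
   this is movlhps, copying the low qword of the source register into the
   high qword of the destination (IEM_MC_STORE_XREG_HI_U64); the memory form
   is movhps, loading the high qword from memory.  The low qword of the
   destination is untouched in both cases. */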
1794
1795
1796/**
1797 * @opcode 0x16
1798 * @opcodesub !11 mr/reg
1799 * @oppfx 0x66
1800 * @opcpuid sse2
1801 * @opgroup og_sse2_pcksclr_datamove
1802 * @opxcpttype 5
1803 * @optest op1=1 op2=2 -> op1=2
1804 * @optest op1=0 op2=-42 -> op1=-42
1805 */
1806FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1807{
1808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1809 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1810 {
1811 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1812 IEM_MC_BEGIN(0, 2);
1813 IEM_MC_LOCAL(uint64_t, uSrc);
1814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1815
1816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1819 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1820
1821 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1822 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1823
1824 IEM_MC_ADVANCE_RIP();
1825 IEM_MC_END();
1826 return VINF_SUCCESS;
1827 }
1828
1829 /**
1830 * @opdone
1831 * @opmnemonic ud660f16m3
1832 * @opcode 0x16
1833 * @opcodesub 11 mr/reg
1834 * @oppfx 0x66
1835 * @opunused immediate
1836 * @opcpuid sse
1837 * @optest ->
1838 */
1839 return IEMOP_RAISE_INVALID_OPCODE();
1840}
1841
1842
1843/**
1844 * @opcode 0x16
1845 * @oppfx 0xf3
1846 * @opcpuid sse3
1847 * @opgroup og_sse3_pcksclr_datamove
1848 * @opxcpttype 4
1849 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1850 * op1=0x00000002000000020000000100000001
1851 */
1852FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1853{
1854 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1857 {
1858 /*
1859 * Register, register.
1860 */
1861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1862 IEM_MC_BEGIN(2, 0);
1863 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1864 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1865
1866 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1867 IEM_MC_PREPARE_SSE_USAGE();
1868
1869 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1870 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1871 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1872
1873 IEM_MC_ADVANCE_RIP();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 /*
1879 * Register, memory.
1880 */
1881 IEM_MC_BEGIN(2, 2);
1882 IEM_MC_LOCAL(RTUINT128U, uSrc);
1883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1884 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1885 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1886
1887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1889 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1890 IEM_MC_PREPARE_SSE_USAGE();
1891
1892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1893 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1894 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1895
1896 IEM_MC_ADVANCE_RIP();
1897 IEM_MC_END();
1898 }
1899 return VINF_SUCCESS;
1900}
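
/*
 * Rough C model of iemAImpl_movshdup (illustrative only; the real helper
 * lives elsewhere). Each odd dword of the source is duplicated into an
 * even/odd pair, which is what the @optest values above encode:
 *
 *      puDst->au32[0] = puSrc->au32[1];
 *      puDst->au32[1] = puSrc->au32[1];
 *      puDst->au32[2] = puSrc->au32[3];
 *      puDst->au32[3] = puSrc->au32[3];
 */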
1901
1902/**
1903 * @opdone
1904 * @opmnemonic udf30f16
1905 * @opcode 0x16
1906 * @oppfx 0xf2
1907 * @opunused intel-modrm
1908 * @opcpuid sse
1909 * @optest ->
1910 * @opdone
1911 */
1912
1913
1914/**
1915 * @opcode 0x17
1916 * @opcodesub !11 mr/reg
1917 * @oppfx none
1918 * @opcpuid sse
1919 * @opgroup og_sse_simdfp_datamove
1920 * @opxcpttype 5
1921 * @optest op1=1 op2=2 -> op1=2
1922 * @optest op1=0 op2=-42 -> op1=-42
1923 */
1924FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1925{
1926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1927 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1928 {
1929 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1930
1931 IEM_MC_BEGIN(0, 2);
1932 IEM_MC_LOCAL(uint64_t, uSrc);
1933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1934
1935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1939
1940 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1941 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1942
1943 IEM_MC_ADVANCE_RIP();
1944 IEM_MC_END();
1945 return VINF_SUCCESS;
1946 }
1947
1948 /**
1949 * @opdone
1950 * @opmnemonic ud0f17m3
1951 * @opcode 0x17
1952 * @opcodesub 11 mr/reg
1953 * @oppfx none
1954 * @opunused immediate
1955 * @opcpuid sse
1956 * @optest ->
1957 */
1958 return IEMOP_RAISE_INVALID_OPCODE();
1959}
1960
1961
1962/**
1963 * @opcode 0x17
1964 * @opcodesub !11 mr/reg
1965 * @oppfx 0x66
1966 * @opcpuid sse2
1967 * @opgroup og_sse2_pcksclr_datamove
1968 * @opxcpttype 5
1969 * @optest op1=1 op2=2 -> op1=2
1970 * @optest op1=0 op2=-42 -> op1=-42
1971 */
1972FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1973{
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1976 {
1977 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1978
1979 IEM_MC_BEGIN(0, 2);
1980 IEM_MC_LOCAL(uint64_t, uSrc);
1981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1982
1983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1985 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1986 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1987
1988 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1989 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1990
1991 IEM_MC_ADVANCE_RIP();
1992 IEM_MC_END();
1993 return VINF_SUCCESS;
1994 }
1995
1996 /**
1997 * @opdone
1998 * @opmnemonic ud660f17m3
1999 * @opcode 0x17
2000 * @opcodesub 11 mr/reg
2001 * @oppfx 0x66
2002 * @opunused immediate
2003 * @opcpuid sse
2004 * @optest ->
2005 */
2006 return IEMOP_RAISE_INVALID_OPCODE();
2007}
2008
2009
2010/**
2011 * @opdone
2012 * @opmnemonic udf30f17
2013 * @opcode 0x17
2014 * @oppfx 0xf3
2015 * @opunused intel-modrm
2016 * @opcpuid sse
2017 * @optest ->
2018 * @opdone
2019 */
2020
2021/**
2022 * @opmnemonic udf20f17
2023 * @opcode 0x17
2024 * @oppfx 0xf2
2025 * @opunused intel-modrm
2026 * @opcpuid sse
2027 * @optest ->
2028 * @opdone
2029 */
2030
2031
2032/** Opcode 0x0f 0x18. */
2033FNIEMOP_DEF(iemOp_prefetch_Grp16)
2034{
2035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2036 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2037 {
2038 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2039 {
2040 case 4: /* Aliased to /0 for the time being according to AMD. */
2041 case 5: /* Aliased to /0 for the time being according to AMD. */
2042 case 6: /* Aliased to /0 for the time being according to AMD. */
2043 case 7: /* Aliased to /0 for the time being according to AMD. */
2044 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2045 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2046 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2047 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2049 }
2050
2051 IEM_MC_BEGIN(0, 1);
2052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2055 /* Currently a NOP. */
2056 NOREF(GCPtrEffSrc);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 return VINF_SUCCESS;
2060 }
2061
2062 return IEMOP_RAISE_INVALID_OPCODE();
2063}
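
/*
 * For reference, the hint encodings dispatched above: 0F 18 /0 = prefetchnta,
 * /1 = prefetcht0, /2 = prefetcht1, /3 = prefetcht2 (with /4../7 aliased per
 * the AMD note). IEM only decodes the effective address and retires the
 * instruction; no memory is touched, matching the faultless behaviour of the
 * prefetch hints.
 */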
2064
2065
2066/** Opcode 0x0f 0x19..0x1f. */
2067FNIEMOP_DEF(iemOp_nop_Ev)
2068{
2069 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2072 {
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_ADVANCE_RIP();
2076 IEM_MC_END();
2077 }
2078 else
2079 {
2080 IEM_MC_BEGIN(0, 1);
2081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2084 /* Currently a NOP. */
2085 NOREF(GCPtrEffSrc);
2086 IEM_MC_ADVANCE_RIP();
2087 IEM_MC_END();
2088 }
2089 return VINF_SUCCESS;
2090}
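
/*
 * This covers the long NOP space; e.g. the recommended 5-byte form
 * 0F 1F 44 00 00 (nop dword ptr [rax+rax*1+0]) ends up in the memory branch
 * above, where the effective address is calculated and then simply discarded.
 */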
2091
2092
2093/** Opcode 0x0f 0x20. */
2094FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2095{
2096 /* mod is ignored, as are operand size overrides. */
2097 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2098 IEMOP_HLP_MIN_386();
2099 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2100 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2101 else
2102 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2103
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2106 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2107 {
2108 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2109 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2110 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2111 iCrReg |= 8;
2112 }
2113 switch (iCrReg)
2114 {
2115 case 0: case 2: case 3: case 4: case 8:
2116 break;
2117 default:
2118 return IEMOP_RAISE_INVALID_OPCODE();
2119 }
2120 IEMOP_HLP_DONE_DECODING();
2121
2122 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2123}
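
/*
 * Byte-level example of the lock-prefix CR8 encoding handled above (only
 * accepted when fMovCr8In32Bit is reported, e.g. on AMD CPUs; illustrative
 * encoding):
 *      F0 0F 20 C0     lock mov eax, cr0   ; decodes as mov eax, cr8
 * The lock prefix just contributes bit 3 of the CR index (iCrReg |= 8).
 */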
2124
2125
2126/** Opcode 0x0f 0x21. */
2127FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2128{
2129 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2130 IEMOP_HLP_MIN_386();
2131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2134 return IEMOP_RAISE_INVALID_OPCODE();
2135 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2136 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2137 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2138}
2139
2140
2141/** Opcode 0x0f 0x22. */
2142FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2143{
2144 /* mod is ignored, as are operand size overrides. */
2145 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2146 IEMOP_HLP_MIN_386();
2147 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2148 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2149 else
2150 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2151
2152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2153 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2154 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2155 {
2156 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2157 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2158 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2159 iCrReg |= 8;
2160 }
2161 switch (iCrReg)
2162 {
2163 case 0: case 2: case 3: case 4: case 8:
2164 break;
2165 default:
2166 return IEMOP_RAISE_INVALID_OPCODE();
2167 }
2168 IEMOP_HLP_DONE_DECODING();
2169
2170 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2171}
2172
2173
2174/** Opcode 0x0f 0x23. */
2175FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2176{
2177 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2178 IEMOP_HLP_MIN_386();
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2181 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2182 return IEMOP_RAISE_INVALID_OPCODE();
2183 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2184 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2185 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2186}
2187
2188
2189/** Opcode 0x0f 0x24. */
2190FNIEMOP_DEF(iemOp_mov_Rd_Td)
2191{
2192 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2193 /** @todo works on 386 and 486. */
2194 /* The RM byte is not considered, see testcase. */
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196}
2197
2198
2199/** Opcode 0x0f 0x26. */
2200FNIEMOP_DEF(iemOp_mov_Td_Rd)
2201{
2202 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2203 /** @todo works on 386 and 486. */
2204 /* The RM byte is not considered, see testcase. */
2205 return IEMOP_RAISE_INVALID_OPCODE();
2206}
2207
2208
2209/**
2210 * @opcode 0x28
2211 * @oppfx none
2212 * @opcpuid sse
2213 * @opgroup og_sse_simdfp_datamove
2214 * @opxcpttype 1
2215 * @optest op1=1 op2=2 -> op1=2
2216 * @optest op1=0 op2=-42 -> op1=-42
2217 */
2218FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2219{
2220 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2228 IEM_MC_BEGIN(0, 0);
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2232 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2233 IEM_MC_ADVANCE_RIP();
2234 IEM_MC_END();
2235 }
2236 else
2237 {
2238 /*
2239 * Register, memory.
2240 */
2241 IEM_MC_BEGIN(0, 2);
2242 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2248 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2249
2250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2251 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2252
2253 IEM_MC_ADVANCE_RIP();
2254 IEM_MC_END();
2255 }
2256 return VINF_SUCCESS;
2257}
2258
2259/**
2260 * @opcode 0x28
2261 * @oppfx 66
2262 * @opcpuid sse2
2263 * @opgroup og_sse2_pcksclr_datamove
2264 * @opxcpttype 1
2265 * @optest op1=1 op2=2 -> op1=2
2266 * @optest op1=0 op2=-42 -> op1=-42
2267 */
2268FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2269{
2270 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2273 {
2274 /*
2275 * Register, register.
2276 */
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2278 IEM_MC_BEGIN(0, 0);
2279 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2280 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2281 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2282 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2283 IEM_MC_ADVANCE_RIP();
2284 IEM_MC_END();
2285 }
2286 else
2287 {
2288 /*
2289 * Register, memory.
2290 */
2291 IEM_MC_BEGIN(0, 2);
2292 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2294
2295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2299
2300 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2301 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2302
2303 IEM_MC_ADVANCE_RIP();
2304 IEM_MC_END();
2305 }
2306 return VINF_SUCCESS;
2307}
2308
2309/* Opcode 0xf3 0x0f 0x28 - invalid */
2310/* Opcode 0xf2 0x0f 0x28 - invalid */
2311
2312/**
2313 * @opcode 0x29
2314 * @oppfx none
2315 * @opcpuid sse
2316 * @opgroup og_sse_simdfp_datamove
2317 * @opxcpttype 1
2318 * @optest op1=1 op2=2 -> op1=2
2319 * @optest op1=0 op2=-42 -> op1=-42
2320 */
2321FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2322{
2323 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2326 {
2327 /*
2328 * Register, register.
2329 */
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2331 IEM_MC_BEGIN(0, 0);
2332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2334 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2335 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2336 IEM_MC_ADVANCE_RIP();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Memory, register.
2343 */
2344 IEM_MC_BEGIN(0, 2);
2345 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2347
2348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2351 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2352
2353 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2354 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2355
2356 IEM_MC_ADVANCE_RIP();
2357 IEM_MC_END();
2358 }
2359 return VINF_SUCCESS;
2360}
2361
2362/**
2363 * @opcode 0x29
2364 * @oppfx 66
2365 * @opcpuid sse2
2366 * @opgroup og_sse2_pcksclr_datamove
2367 * @opxcpttype 1
2368 * @optest op1=1 op2=2 -> op1=2
2369 * @optest op1=0 op2=-42 -> op1=-42
2370 */
2371FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2372{
2373 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2376 {
2377 /*
2378 * Register, register.
2379 */
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2381 IEM_MC_BEGIN(0, 0);
2382 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2385 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2386 IEM_MC_ADVANCE_RIP();
2387 IEM_MC_END();
2388 }
2389 else
2390 {
2391 /*
2392 * Memory, register.
2393 */
2394 IEM_MC_BEGIN(0, 2);
2395 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2397
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2402
2403 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2404 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2405
2406 IEM_MC_ADVANCE_RIP();
2407 IEM_MC_END();
2408 }
2409 return VINF_SUCCESS;
2410}
2411
2412/* Opcode 0xf3 0x0f 0x29 - invalid */
2413/* Opcode 0xf2 0x0f 0x29 - invalid */
2414
2415
2416/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2417FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2418/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2419FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2420/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2421FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2422/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2423FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2424
2425
2426/**
2427 * @opcode 0x2b
2428 * @opcodesub !11 mr/reg
2429 * @oppfx none
2430 * @opcpuid sse
2431 * @opgroup og_sse1_cachect
2432 * @opxcpttype 1
2433 * @optest op1=1 op2=2 -> op1=2
2434 * @optest op1=0 op2=-42 -> op1=-42
2435 */
2436FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2437{
2438 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2440 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2441 {
2442 /*
2443 * memory, register.
2444 */
2445 IEM_MC_BEGIN(0, 2);
2446 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2453
2454 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2455 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 /* The register, register encoding is invalid. */
2461 else
2462 return IEMOP_RAISE_INVALID_OPCODE();
2463 return VINF_SUCCESS;
2464}
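
/*
 * Note that the non-temporal hint of movntps has no architecturally visible
 * effect to emulate; the implementation above is a plain 128-bit store, the
 * usual 16-byte alignment check (#GP(0) on a misaligned operand) being the
 * only extra work.
 */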
2465
2466/**
2467 * @opcode 0x2b
2468 * @opcodesub !11 mr/reg
2469 * @oppfx 0x66
2470 * @opcpuid sse2
2471 * @opgroup og_sse2_cachect
2472 * @opxcpttype 1
2473 * @optest op1=1 op2=2 -> op1=2
2474 * @optest op1=0 op2=-42 -> op1=-42
2475 */
2476FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2477{
2478 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2480 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2481 {
2482 /*
2483 * memory, register.
2484 */
2485 IEM_MC_BEGIN(0, 2);
2486 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2491 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2493
2494 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2495 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2496
2497 IEM_MC_ADVANCE_RIP();
2498 IEM_MC_END();
2499 }
2500 /* The register, register encoding is invalid. */
2501 else
2502 return IEMOP_RAISE_INVALID_OPCODE();
2503 return VINF_SUCCESS;
2504}
2505/* Opcode 0xf3 0x0f 0x2b - invalid */
2506/* Opcode 0xf2 0x0f 0x2b - invalid */
2507
2508
2509/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2510FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2511/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2512FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2513/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2514FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2515/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2516FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2517
2518/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2519FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2520/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2521FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2522/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2523FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2524/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2525FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2526
2527/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2528FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2529/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2530FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2531/* Opcode 0xf3 0x0f 0x2e - invalid */
2532/* Opcode 0xf2 0x0f 0x2e - invalid */
2533
2534/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2535FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2536/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2537FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2538/* Opcode 0xf3 0x0f 0x2f - invalid */
2539/* Opcode 0xf2 0x0f 0x2f - invalid */
2540
2541/** Opcode 0x0f 0x30. */
2542FNIEMOP_DEF(iemOp_wrmsr)
2543{
2544 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2546 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2547}
2548
2549
2550/** Opcode 0x0f 0x31. */
2551FNIEMOP_DEF(iemOp_rdtsc)
2552{
2553 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2556}
2557
2558
2559/** Opcode 0x0f 0x32. */
2560FNIEMOP_DEF(iemOp_rdmsr)
2561{
2562 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2565}
2566
2567
2568/** Opcode 0x0f 0x33. */
2569FNIEMOP_DEF(iemOp_rdpmc)
2570{
2571 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2574}
2575
2576
2577/** Opcode 0x0f 0x34. */
2578FNIEMOP_STUB(iemOp_sysenter);
2579/** Opcode 0x0f 0x35. */
2580FNIEMOP_STUB(iemOp_sysexit);
2581/** Opcode 0x0f 0x37. */
2582FNIEMOP_STUB(iemOp_getsec);
2583
2584
2585/** Opcode 0x0f 0x38. */
2586FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2587{
2588#ifdef IEM_WITH_THREE_0F_38
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2591#else
2592 IEMOP_BITCH_ABOUT_STUB();
2593 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2594#endif
2595}
2596
2597
2598/** Opcode 0x0f 0x3a. */
2599FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2600{
2601#ifdef IEM_WITH_THREE_0F_3A
2602 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2603 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2604#else
2605 IEMOP_BITCH_ABOUT_STUB();
2606 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2607#endif
2608}
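
/*
 * Both three-byte tables are assumed to be laid out with four entries per
 * opcode byte, one per mandatory-prefix column, so 'b * 4 + pVCpu->iem.s.idxPrefix'
 * with idxPrefix 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 picks the right
 * decoder. Sketch of the lookup for 66 0F 38 00 (pshufb):
 *      g_apfnThreeByte0f38[0x00 * 4 + 1](pVCpu);
 */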
2609
2610
2611/**
2612 * Implements a conditional move.
2613 *
2614 * Wish there was an obvious way to do this where we could share and reduce
2615 * code bloat.
2616 *
2617 * @param a_Cnd The conditional "microcode" operation.
2618 */
2619#define CMOV_X(a_Cnd) \
2620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2622 { \
2623 switch (pVCpu->iem.s.enmEffOpSize) \
2624 { \
2625 case IEMMODE_16BIT: \
2626 IEM_MC_BEGIN(0, 1); \
2627 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2628 a_Cnd { \
2629 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2630 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2631 } IEM_MC_ENDIF(); \
2632 IEM_MC_ADVANCE_RIP(); \
2633 IEM_MC_END(); \
2634 return VINF_SUCCESS; \
2635 \
2636 case IEMMODE_32BIT: \
2637 IEM_MC_BEGIN(0, 1); \
2638 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2639 a_Cnd { \
2640 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2641 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2642 } IEM_MC_ELSE() { \
2643 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2644 } IEM_MC_ENDIF(); \
2645 IEM_MC_ADVANCE_RIP(); \
2646 IEM_MC_END(); \
2647 return VINF_SUCCESS; \
2648 \
2649 case IEMMODE_64BIT: \
2650 IEM_MC_BEGIN(0, 1); \
2651 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2652 a_Cnd { \
2653 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2654 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2655 } IEM_MC_ENDIF(); \
2656 IEM_MC_ADVANCE_RIP(); \
2657 IEM_MC_END(); \
2658 return VINF_SUCCESS; \
2659 \
2660 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2661 } \
2662 } \
2663 else \
2664 { \
2665 switch (pVCpu->iem.s.enmEffOpSize) \
2666 { \
2667 case IEMMODE_16BIT: \
2668 IEM_MC_BEGIN(0, 2); \
2669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2670 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2672 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2673 a_Cnd { \
2674 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2675 } IEM_MC_ENDIF(); \
2676 IEM_MC_ADVANCE_RIP(); \
2677 IEM_MC_END(); \
2678 return VINF_SUCCESS; \
2679 \
2680 case IEMMODE_32BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2688 } IEM_MC_ELSE() { \
2689 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2690 } IEM_MC_ENDIF(); \
2691 IEM_MC_ADVANCE_RIP(); \
2692 IEM_MC_END(); \
2693 return VINF_SUCCESS; \
2694 \
2695 case IEMMODE_64BIT: \
2696 IEM_MC_BEGIN(0, 2); \
2697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2698 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2700 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2701 a_Cnd { \
2702 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2703 } IEM_MC_ENDIF(); \
2704 IEM_MC_ADVANCE_RIP(); \
2705 IEM_MC_END(); \
2706 return VINF_SUCCESS; \
2707 \
2708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2709 } \
2710 } do {} while (0)
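
/*
 * Rough C model of the 32-bit register form the macro expands to (variable
 * names made up for illustration). The IEM_MC_ELSE arm matters: in 64-bit
 * mode a 32-bit cmov zero-extends the destination even when the condition is
 * false:
 *
 *      uint32_t const uSrc = uGregSrc32;
 *      uGregDst64 = fCondition ? uSrc : (uint32_t)uGregDst64;
 *      // upper 32 bits of the destination end up zero either way
 */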
2711
2712
2713
2714/** Opcode 0x0f 0x40. */
2715FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2716{
2717 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2718 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2719}
2720
2721
2722/** Opcode 0x0f 0x41. */
2723FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2724{
2725 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2726 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2727}
2728
2729
2730/** Opcode 0x0f 0x42. */
2731FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2732{
2733 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2734 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2735}
2736
2737
2738/** Opcode 0x0f 0x43. */
2739FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2740{
2741 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2742 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2743}
2744
2745
2746/** Opcode 0x0f 0x44. */
2747FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2748{
2749 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2750 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2751}
2752
2753
2754/** Opcode 0x0f 0x45. */
2755FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2756{
2757 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2758 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2759}
2760
2761
2762/** Opcode 0x0f 0x46. */
2763FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2764{
2765 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2766 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2767}
2768
2769
2770/** Opcode 0x0f 0x47. */
2771FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2772{
2773 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2774 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2775}
2776
2777
2778/** Opcode 0x0f 0x48. */
2779FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2780{
2781 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2782 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2783}
2784
2785
2786/** Opcode 0x0f 0x49. */
2787FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2788{
2789 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2790 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2791}
2792
2793
2794/** Opcode 0x0f 0x4a. */
2795FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2796{
2797 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2798 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2799}
2800
2801
2802/** Opcode 0x0f 0x4b. */
2803FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2804{
2805 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2806 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2807}
2808
2809
2810/** Opcode 0x0f 0x4c. */
2811FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2812{
2813 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2814 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2815}
2816
2817
2818/** Opcode 0x0f 0x4d. */
2819FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2820{
2821 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2822 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2823}
2824
2825
2826/** Opcode 0x0f 0x4e. */
2827FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2828{
2829 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2830 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2831}
2832
2833
2834/** Opcode 0x0f 0x4f. */
2835FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2836{
2837 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2838 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2839}
2840
2841#undef CMOV_X
2842
2843/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2844FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2845/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2846FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2847/* Opcode 0xf3 0x0f 0x50 - invalid */
2848/* Opcode 0xf2 0x0f 0x50 - invalid */
2849
2850/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2851FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2852/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2853FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2854/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2855FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2856/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2857FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2858
2859/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2860FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2861/* Opcode 0x66 0x0f 0x52 - invalid */
2862/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2863FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2864/* Opcode 0xf2 0x0f 0x52 - invalid */
2865
2866/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2867FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2868/* Opcode 0x66 0x0f 0x53 - invalid */
2869/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2870FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2871/* Opcode 0xf2 0x0f 0x53 - invalid */
2872
2873/** Opcode 0x0f 0x54 - andps Vps, Wps */
2874FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2875/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2876FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2877/* Opcode 0xf3 0x0f 0x54 - invalid */
2878/* Opcode 0xf2 0x0f 0x54 - invalid */
2879
2880/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2881FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2882/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2883FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2884/* Opcode 0xf3 0x0f 0x55 - invalid */
2885/* Opcode 0xf2 0x0f 0x55 - invalid */
2886
2887/** Opcode 0x0f 0x56 - orps Vps, Wps */
2888FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2889/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2890FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2891/* Opcode 0xf3 0x0f 0x56 - invalid */
2892/* Opcode 0xf2 0x0f 0x56 - invalid */
2893
2894/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2895FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2896/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2897FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2898/* Opcode 0xf3 0x0f 0x57 - invalid */
2899/* Opcode 0xf2 0x0f 0x57 - invalid */
2900
2901/** Opcode 0x0f 0x58 - addps Vps, Wps */
2902FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2903/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2904FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2905/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2906FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2907/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2908FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2909
2910/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2911FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2912/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2913FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2914/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2915FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2916/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2917FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2918
2919/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2920FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2921/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2922FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2923/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2924FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2925/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2926FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2927
2928/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2929FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2930/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2931FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2932/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2933FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2934/* Opcode 0xf2 0x0f 0x5b - invalid */
2935
2936/** Opcode 0x0f 0x5c - subps Vps, Wps */
2937FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2938/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2939FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2940/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2941FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2942/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2943FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2944
2945/** Opcode 0x0f 0x5d - minps Vps, Wps */
2946FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2947/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2948FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2949/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2950FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2951/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2952FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2953
2954/** Opcode 0x0f 0x5e - divps Vps, Wps */
2955FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2958/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2959FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2960/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2961FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2962
2963/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2964FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2965/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2966FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2967/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2968FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2969/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2970FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2971
2972/**
2973 * Common worker for SSE2 instructions on the forms:
2974 * pxxxx xmm1, xmm2/mem128
2975 *
2976 * The 2nd operand is the first half of a register, which in the memory case
2977 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2978 * memory accessed for SSE.
2979 *
2980 * Exceptions type 4.
2981 */
2982FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2986 {
2987 /*
2988 * Register, register.
2989 */
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 IEM_MC_BEGIN(2, 0);
2992 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2993 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_PREPARE_SSE_USAGE();
2996 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2997 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2998 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2999 IEM_MC_ADVANCE_RIP();
3000 IEM_MC_END();
3001 }
3002 else
3003 {
3004 /*
3005 * Register, memory.
3006 */
3007 IEM_MC_BEGIN(2, 2);
3008 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3009 IEM_MC_LOCAL(uint64_t, uSrc);
3010 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3012
3013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3016 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3017
3018 IEM_MC_PREPARE_SSE_USAGE();
3019 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3020 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3021
3022 IEM_MC_ADVANCE_RIP();
3023 IEM_MC_END();
3024 }
3025 return VINF_SUCCESS;
3026}
3027
3028
3029/**
3030 * Common worker for MMX instructions on the forms:
3031 * pxxxx mm1, mm2/mem32
3032 *
3033 * The 2nd operand is the first half of a register, which in the memory case
3034 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3035 * memory accessed for SSE.
3036 *
3037 * Exceptions type 4.
3038 */
3039FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3040{
3041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3042 if (!pImpl->pfnU64)
3043 return IEMOP_RAISE_INVALID_OPCODE();
3044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3045 {
3046 /*
3047 * Register, register.
3048 */
3049 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3050 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3052 IEM_MC_BEGIN(2, 0);
3053 IEM_MC_ARG(uint64_t *, pDst, 0);
3054 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3055 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3056 IEM_MC_PREPARE_FPU_USAGE();
3057 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3058 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3059 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3060 IEM_MC_ADVANCE_RIP();
3061 IEM_MC_END();
3062 }
3063 else
3064 {
3065 /*
3066 * Register, memory.
3067 */
3068 IEM_MC_BEGIN(2, 2);
3069 IEM_MC_ARG(uint64_t *, pDst, 0);
3070 IEM_MC_LOCAL(uint32_t, uSrc);
3071 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3073
3074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3077 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3078
3079 IEM_MC_PREPARE_FPU_USAGE();
3080 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3081 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3082
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 return VINF_SUCCESS;
3087}
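
/*
 * Illustrative model of the low-to-full interleave these two workers hand to
 * pImpl, using punpcklbw at MMX width (a sketch, not the actual helper code):
 *
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          uDstOut.au8[i * 2]     = uDstIn.au8[i];  // even bytes from the low dst dword
 *          uDstOut.au8[i * 2 + 1] = uSrc.au8[i];    // odd bytes from the 32-bit source
 *      }
 */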
3088
3089
3090/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3091FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3092{
3093 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3094 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3095}
3096
3097/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3098FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3099{
3100 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3101 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3102}
3103
3104/* Opcode 0xf3 0x0f 0x60 - invalid */
3105
3106
3107/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3108FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3109{
3110 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
3111 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3112}
3113
3114/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3115FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3116{
3117 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3118 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3119}
3120
3121/* Opcode 0xf3 0x0f 0x61 - invalid */
3122
3123
3124/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3125FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3126{
3127 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3128 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3129}
3130
3131/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3132FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3133{
3134 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3135 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3136}
3137
3138/* Opcode 0xf3 0x0f 0x62 - invalid */
3139
3140
3141
3142/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3143FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3144/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3145FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3146/* Opcode 0xf3 0x0f 0x63 - invalid */
3147
3148/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3149FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3150/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3151FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3152/* Opcode 0xf3 0x0f 0x64 - invalid */
3153
3154/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3155FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3156/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3157FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3158/* Opcode 0xf3 0x0f 0x65 - invalid */
3159
3160/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3161FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3162/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3163FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3164/* Opcode 0xf3 0x0f 0x66 - invalid */
3165
3166/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3167FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3168/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3169FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3170/* Opcode 0xf3 0x0f 0x67 - invalid */
3171
3172
3173/**
3174 * Common worker for MMX instructions on the form:
3175 * pxxxx mm1, mm2/mem64
3176 *
3177 * The 2nd operand is the second half of a register, which in the memory case
3178 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3179 * where it may read the full 128 bits or only the upper 64 bits.
3180 *
3181 * Exceptions type 4.
3182 */
3183FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3184{
3185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3186 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3188 {
3189 /*
3190 * Register, register.
3191 */
3192 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3193 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_BEGIN(2, 0);
3196 IEM_MC_ARG(uint64_t *, pDst, 0);
3197 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3198 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3199 IEM_MC_PREPARE_FPU_USAGE();
3200 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3201 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3202 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3203 IEM_MC_ADVANCE_RIP();
3204 IEM_MC_END();
3205 }
3206 else
3207 {
3208 /*
3209 * Register, memory.
3210 */
3211 IEM_MC_BEGIN(2, 2);
3212 IEM_MC_ARG(uint64_t *, pDst, 0);
3213 IEM_MC_LOCAL(uint64_t, uSrc);
3214 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3216
3217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3221
3222 IEM_MC_PREPARE_FPU_USAGE();
3223 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3224 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3225
3226 IEM_MC_ADVANCE_RIP();
3227 IEM_MC_END();
3228 }
3229 return VINF_SUCCESS;
3230}
3231
3232
3233/**
3234 * Common worker for SSE2 instructions on the form:
3235 * pxxxx xmm1, xmm2/mem128
3236 *
3237 * The 2nd operand is the second half of a register, which in the memory case
3238 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3239 * where it may read the full 128 bits or only the upper 64 bits.
3240 *
3241 * Exceptions type 4.
3242 */
3243FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3244{
3245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3247 {
3248 /*
3249 * Register, register.
3250 */
3251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3252 IEM_MC_BEGIN(2, 0);
3253 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3254 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3255 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3256 IEM_MC_PREPARE_SSE_USAGE();
3257 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3258 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3259 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 }
3263 else
3264 {
3265 /*
3266 * Register, memory.
3267 */
3268 IEM_MC_BEGIN(2, 2);
3269 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3270 IEM_MC_LOCAL(RTUINT128U, uSrc);
3271 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3273
3274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3277 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3278
3279 IEM_MC_PREPARE_SSE_USAGE();
3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3281 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3282
3283 IEM_MC_ADVANCE_RIP();
3284 IEM_MC_END();
3285 }
3286 return VINF_SUCCESS;
3287}
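
/*
 * The high-to-full shape mirrors the low-to-full workers further up, only
 * sourcing the upper halves; punpckhbw at MMX width as an illustrative model:
 *
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          uDstOut.au8[i * 2]     = uDstIn.au8[4 + i];  // bytes from the high dst dword
 *          uDstOut.au8[i * 2 + 1] = uSrc.au8[4 + i];    // interleaved with the high src dword
 *      }
 */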
3288
3289
3290/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3291FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3292{
3293 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3294 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3295}
3296
3297/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3298FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3299{
3300 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3301 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3302}
3303/* Opcode 0xf3 0x0f 0x68 - invalid */
3304
3305
3306/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3307FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3308{
3309 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3310 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3311}
3312
3313/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3314FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3315{
3316 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3317 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3318
3319}
3320/* Opcode 0xf3 0x0f 0x69 - invalid */
3321
3322
3323/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3324FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3325{
3326 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3327 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3328}
3329
3330/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3331FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3332{
3333 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3334 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3335}
3336/* Opcode 0xf3 0x0f 0x6a - invalid */
3337
3338
3339/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3340FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3341/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3342FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3343/* Opcode 0xf3 0x0f 0x6b - invalid */
3344
3345
3346/* Opcode 0x0f 0x6c - invalid */
3347
3348/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3349FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3350{
3351 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3352 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3353}
3354
3355/* Opcode 0xf3 0x0f 0x6c - invalid */
3356/* Opcode 0xf2 0x0f 0x6c - invalid */
3357
3358
3359/* Opcode 0x0f 0x6d - invalid */
3360
3361/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3362FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3363{
3364 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3365 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3366}
3367
3368/* Opcode 0xf3 0x0f 0x6d - invalid */
3369
3370
3371FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3372{
3373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3374 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3375 {
3376 /**
3377 * @opcode 0x6e
3378 * @opcodesub rex.w=1
3379 * @oppfx none
3380 * @opcpuid mmx
3381 * @opgroup og_mmx_datamove
3382 * @opxcpttype 5
3383 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3384 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3385 */
3386 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3388 {
3389 /* MMX, greg64 */
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3391 IEM_MC_BEGIN(0, 1);
3392 IEM_MC_LOCAL(uint64_t, u64Tmp);
3393
3394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3396
3397 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3398 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3399 IEM_MC_FPU_TO_MMX_MODE();
3400
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /* MMX, [mem64] */
3407 IEM_MC_BEGIN(0, 2);
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409 IEM_MC_LOCAL(uint64_t, u64Tmp);
3410
3411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3414 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3415
3416 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3417 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3418 IEM_MC_FPU_TO_MMX_MODE();
3419
3420 IEM_MC_ADVANCE_RIP();
3421 IEM_MC_END();
3422 }
3423 }
3424 else
3425 {
3426 /**
3427 * @opdone
3428 * @opcode 0x6e
3429 * @opcodesub rex.w=0
3430 * @oppfx none
3431 * @opcpuid mmx
3432 * @opgroup og_mmx_datamove
3433 * @opxcpttype 5
3434 * @opfunction iemOp_movd_q_Pd_Ey
3435 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3436 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3437 */
3438 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3440 {
3441 /* MMX, greg */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 1);
3444 IEM_MC_LOCAL(uint64_t, u64Tmp);
3445
3446 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3448
3449 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3450 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3451 IEM_MC_FPU_TO_MMX_MODE();
3452
3453 IEM_MC_ADVANCE_RIP();
3454 IEM_MC_END();
3455 }
3456 else
3457 {
3458 /* MMX, [mem] */
3459 IEM_MC_BEGIN(0, 2);
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461 IEM_MC_LOCAL(uint32_t, u32Tmp);
3462
3463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3467
3468 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3469 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3470 IEM_MC_FPU_TO_MMX_MODE();
3471
3472 IEM_MC_ADVANCE_RIP();
3473 IEM_MC_END();
3474 }
3475 }
3476 return VINF_SUCCESS;
3477}
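
/*
 * Assembly forms decoded above, for reference:
 *      0F 6E /r            movd mm, r/m32      ; zero-extended to 64 bits
 *      REX.W 0F 6E /r      movq mm, r/m64
 * Both switch the FPU into MMX mode via IEM_MC_FPU_TO_MMX_MODE, cf. the
 * ftw=0xff in the @optest values above.
 */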
3478
3479FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3480{
3481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3482 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3483 {
3484 /**
3485 * @opcode 0x6e
3486 * @opcodesub rex.w=1
3487 * @oppfx 0x66
3488 * @opcpuid sse2
3489 * @opgroup og_sse2_simdint_datamove
3490 * @opxcpttype 5
3491 * @optest 64-bit / op1=1 op2=2 -> op1=2
3492 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3493 */
3494 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3496 {
3497 /* XMM, greg64 */
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3499 IEM_MC_BEGIN(0, 1);
3500 IEM_MC_LOCAL(uint64_t, u64Tmp);
3501
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3504
3505 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3506 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3507
3508 IEM_MC_ADVANCE_RIP();
3509 IEM_MC_END();
3510 }
3511 else
3512 {
3513 /* XMM, [mem64] */
3514 IEM_MC_BEGIN(0, 2);
3515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3516 IEM_MC_LOCAL(uint64_t, u64Tmp);
3517
3518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3522
3523 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3524 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3525
3526 IEM_MC_ADVANCE_RIP();
3527 IEM_MC_END();
3528 }
3529 }
3530 else
3531 {
3532 /**
3533 * @opdone
3534 * @opcode 0x6e
3535 * @opcodesub rex.w=0
3536 * @oppfx 0x66
3537 * @opcpuid sse2
3538 * @opgroup og_sse2_simdint_datamove
3539 * @opxcpttype 5
3540 * @opfunction iemOp_movd_q_Vy_Ey
3541 * @optest op1=1 op2=2 -> op1=2
3542 * @optest op1=0 op2=-42 -> op1=-42
3543 */
3544 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3546 {
3547 /* XMM, greg32 */
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_BEGIN(0, 1);
3550 IEM_MC_LOCAL(uint32_t, u32Tmp);
3551
3552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3554
3555 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3556 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3557
3558 IEM_MC_ADVANCE_RIP();
3559 IEM_MC_END();
3560 }
3561 else
3562 {
3563 /* XMM, [mem32] */
3564 IEM_MC_BEGIN(0, 2);
3565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3566 IEM_MC_LOCAL(uint32_t, u32Tmp);
3567
3568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3572
3573 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3574 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3575
3576 IEM_MC_ADVANCE_RIP();
3577 IEM_MC_END();
3578 }
3579 }
3580 return VINF_SUCCESS;
3581}
3582
3583/* Opcode 0xf3 0x0f 0x6e - invalid */
3584
3585
3586/**
3587 * @opcode 0x6f
3588 * @oppfx none
3589 * @opcpuid mmx
3590 * @opgroup og_mmx_datamove
3591 * @opxcpttype 5
3592 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3593 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3594 */
3595FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3596{
3597 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3600 {
3601 /*
3602 * Register, register.
3603 */
3604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3605 IEM_MC_BEGIN(0, 1);
3606 IEM_MC_LOCAL(uint64_t, u64Tmp);
3607
3608 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3609 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3610
3611 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3612 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3613 IEM_MC_FPU_TO_MMX_MODE();
3614
3615 IEM_MC_ADVANCE_RIP();
3616 IEM_MC_END();
3617 }
3618 else
3619 {
3620 /*
3621 * Register, memory.
3622 */
3623 IEM_MC_BEGIN(0, 2);
3624 IEM_MC_LOCAL(uint64_t, u64Tmp);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626
3627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3630 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3631
3632 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3633 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_ADVANCE_RIP();
3637 IEM_MC_END();
3638 }
3639 return VINF_SUCCESS;
3640}
3641
3642/**
3643 * @opcode 0x6f
3644 * @oppfx 0x66
3645 * @opcpuid sse2
3646 * @opgroup og_sse2_simdint_datamove
3647 * @opxcpttype 1
3648 * @optest op1=1 op2=2 -> op1=2
3649 * @optest op1=0 op2=-42 -> op1=-42
3650 */
3651FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3652{
3653 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3656 {
3657 /*
3658 * Register, register.
3659 */
3660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3661 IEM_MC_BEGIN(0, 0);
3662
3663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3665
3666 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3667 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 /*
3674 * Register, memory.
3675 */
3676 IEM_MC_BEGIN(0, 2);
3677 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3679
3680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3684
3685 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3686 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 return VINF_SUCCESS;
3692}
3693
3694/**
3695 * @opcode 0x6f
3696 * @oppfx 0xf3
3697 * @opcpuid sse2
3698 * @opgroup og_sse2_simdint_datamove
3699 * @opxcpttype 4UA
3700 * @optest op1=1 op2=2 -> op1=2
3701 * @optest op1=0 op2=-42 -> op1=-42
3702 */
3703FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3704{
3705 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3708 {
3709 /*
3710 * Register, register.
3711 */
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3713 IEM_MC_BEGIN(0, 0);
3714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3716 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3717 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 }
3721 else
3722 {
3723 /*
3724 * Register, memory.
3725 */
3726 IEM_MC_BEGIN(0, 2);
3727 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3729
3730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3733 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
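    /* Unlike movdqa, movdqu performs no 16-byte alignment check on the memory operand. */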
3734 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3736
3737 IEM_MC_ADVANCE_RIP();
3738 IEM_MC_END();
3739 }
3740 return VINF_SUCCESS;
3741}
3742
3743
3744/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
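/* The imm8 packs four 2-bit indices; each selects which source word supplies the corresponding destination word. */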
3745FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3746{
3747 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3750 {
3751 /*
3752 * Register, register.
3753 */
3754 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756
3757 IEM_MC_BEGIN(3, 0);
3758 IEM_MC_ARG(uint64_t *, pDst, 0);
3759 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3760 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3761 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3762 IEM_MC_PREPARE_FPU_USAGE();
3763 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3764 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3765 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 }
3769 else
3770 {
3771 /*
3772 * Register, memory.
3773 */
3774 IEM_MC_BEGIN(3, 2);
3775 IEM_MC_ARG(uint64_t *, pDst, 0);
3776 IEM_MC_LOCAL(uint64_t, uSrc);
3777 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3779
3780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3781 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3782 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3785
3786 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3787 IEM_MC_PREPARE_FPU_USAGE();
3788 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3789 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3790
3791 IEM_MC_ADVANCE_RIP();
3792 IEM_MC_END();
3793 }
3794 return VINF_SUCCESS;
3795}
3796
3797/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3798FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3799{
3800 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3803 {
3804 /*
3805 * Register, register.
3806 */
3807 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809
3810 IEM_MC_BEGIN(3, 0);
3811 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3812 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3813 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3815 IEM_MC_PREPARE_SSE_USAGE();
3816 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3817 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3818 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 }
3822 else
3823 {
3824 /*
3825 * Register, memory.
3826 */
3827 IEM_MC_BEGIN(3, 2);
3828 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3829 IEM_MC_LOCAL(RTUINT128U, uSrc);
3830 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3834 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3835 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3838
3839 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3840 IEM_MC_PREPARE_SSE_USAGE();
3841 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3843
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 }
3847 return VINF_SUCCESS;
3848}
3849
3850/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3851FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3852{
3853 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3856 {
3857 /*
3858 * Register, register.
3859 */
3860 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862
3863 IEM_MC_BEGIN(3, 0);
3864 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3865 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3866 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3868 IEM_MC_PREPARE_SSE_USAGE();
3869 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3870 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3871 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /*
3878 * Register, memory.
3879 */
3880 IEM_MC_BEGIN(3, 2);
3881 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3882 IEM_MC_LOCAL(RTUINT128U, uSrc);
3883 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885
3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3887 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3888 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3891
3892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3893 IEM_MC_PREPARE_SSE_USAGE();
3894 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3896
3897 IEM_MC_ADVANCE_RIP();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3904FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3905{
3906 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3909 {
3910 /*
3911 * Register, register.
3912 */
3913 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(3, 0);
3917 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3919 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3921 IEM_MC_PREPARE_SSE_USAGE();
3922 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3923 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3925 IEM_MC_ADVANCE_RIP();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /*
3931 * Register, memory.
3932 */
3933 IEM_MC_BEGIN(3, 2);
3934 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3935 IEM_MC_LOCAL(RTUINT128U, uSrc);
3936 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3938
3939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3940 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3941 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3943 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3944
3945 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3946 IEM_MC_PREPARE_SSE_USAGE();
3947 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3949
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 return VINF_SUCCESS;
3954}
3955
3956
3957/** Opcode 0x0f 0x71 11/2. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3959
3960/** Opcode 0x66 0x0f 0x71 11/2. */
3961FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x0f 0x71 11/4. */
3964FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x66 0x0f 0x71 11/4. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3968
3969/** Opcode 0x0f 0x71 11/6. */
3970FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3971
3972/** Opcode 0x66 0x0f 0x71 11/6. */
3973FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3974
3975
3976/**
3977 * Group 12 jump table for register variant.
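 * One column per mandatory prefix (none, 0x66, 0xf3, 0xf2); the dispatcher
 * below indexes it as reg*4 + pVCpu->iem.s.idxPrefix.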
3978 */
3979IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3980{
3981 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3982 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3983 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3984 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3985 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3986 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3987 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3988 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3989};
3990AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3991
3992
3993/** Opcode 0x0f 0x71. */
3994FNIEMOP_DEF(iemOp_Grp12)
3995{
3996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3998 /* register, register */
3999 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4000 + pVCpu->iem.s.idxPrefix], bRm);
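    /* The memory forms of group 12 are all invalid; the handler below still consumes the imm8 byte. */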
4001 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4002}
4003
4004
4005/** Opcode 0x0f 0x72 11/2. */
4006FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4007
4008/** Opcode 0x66 0x0f 0x72 11/2. */
4009FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x0f 0x72 11/4. */
4012FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x66 0x0f 0x72 11/4. */
4015FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x0f 0x72 11/6. */
4018FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x66 0x0f 0x72 11/6. */
4021FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4022
4023
4024/**
4025 * Group 13 jump table for register variant.
4026 */
4027IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4028{
4029 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4030 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4031 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4032 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4033 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4034 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4035 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4036 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4037};
4038AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4039
4040/** Opcode 0x0f 0x72. */
4041FNIEMOP_DEF(iemOp_Grp13)
4042{
4043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4045 /* register, register */
4046 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4047 + pVCpu->iem.s.idxPrefix], bRm);
4048 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4049}
4050
4051
4052/** Opcode 0x0f 0x73 11/2. */
4053FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4054
4055/** Opcode 0x66 0x0f 0x73 11/2. */
4056FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4057
4058/** Opcode 0x66 0x0f 0x73 11/3. */
4059FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4060
4061/** Opcode 0x0f 0x73 11/6. */
4062FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4063
4064/** Opcode 0x66 0x0f 0x73 11/6. */
4065FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x73 11/7. */
4068FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4069
4070/**
4071 * Group 14 jump table for register variant.
4072 */
4073IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4074{
4075 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4076 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4077 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4078 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4079 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4080 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4081 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4083};
4084AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4085
4086
4087/** Opcode 0x0f 0x73. */
4088FNIEMOP_DEF(iemOp_Grp14)
4089{
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4092 /* register, register */
4093 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4094 + pVCpu->iem.s.idxPrefix], bRm);
4095 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4096}
4097
4098
4099/**
4100 * Common worker for MMX instructions of the form:
4101 * pxxx mm1, mm2/mem64
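 *
 * No alignment is enforced on the 64-bit memory operand.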
4102 */
4103FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4104{
4105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4107 {
4108 /*
4109 * Register, register.
4110 */
4111 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4112 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_BEGIN(2, 0);
4115 IEM_MC_ARG(uint64_t *, pDst, 0);
4116 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4117 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4118 IEM_MC_PREPARE_FPU_USAGE();
4119 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4120 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4121 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4122 IEM_MC_ADVANCE_RIP();
4123 IEM_MC_END();
4124 }
4125 else
4126 {
4127 /*
4128 * Register, memory.
4129 */
4130 IEM_MC_BEGIN(2, 2);
4131 IEM_MC_ARG(uint64_t *, pDst, 0);
4132 IEM_MC_LOCAL(uint64_t, uSrc);
4133 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_FPU_USAGE();
4142 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4143 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 return VINF_SUCCESS;
4149}
4150
4151
4152/**
4153 * Common worker for SSE2 instructions of the form:
4154 * pxxx xmm1, xmm2/mem128
4155 *
4156 * Proper alignment of the 128-bit operand is enforced.
4157 * Exceptions type 4. SSE2 cpuid checks.
4158 */
4159FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4160{
4161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4163 {
4164 /*
4165 * Register, register.
4166 */
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168 IEM_MC_BEGIN(2, 0);
4169 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4170 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4172 IEM_MC_PREPARE_SSE_USAGE();
4173 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4174 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4175 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /*
4182 * Register, memory.
4183 */
4184 IEM_MC_BEGIN(2, 2);
4185 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4186 IEM_MC_LOCAL(RTUINT128U, uSrc);
4187 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4189
4190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4193 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4194
4195 IEM_MC_PREPARE_SSE_USAGE();
4196 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4197 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4198
4199 IEM_MC_ADVANCE_RIP();
4200 IEM_MC_END();
4201 }
4202 return VINF_SUCCESS;
4203}
4204
4205
4206/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4207FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4208{
4209 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4210 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4211}
4212
4213/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4214FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4215{
4216 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4217 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4218}
4219
4220/* Opcode 0xf3 0x0f 0x74 - invalid */
4221/* Opcode 0xf2 0x0f 0x74 - invalid */
4222
4223
4224/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4225FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4226{
4227 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4228 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4229}
4230
4231/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4232FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4233{
4234 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4235 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4236}
4237
4238/* Opcode 0xf3 0x0f 0x75 - invalid */
4239/* Opcode 0xf2 0x0f 0x75 - invalid */
4240
4241
4242/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4243FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4244{
4245 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4246 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4247}
4248
4249/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4250FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4251{
4252 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4253 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4254}
4255
4256/* Opcode 0xf3 0x0f 0x76 - invalid */
4257/* Opcode 0xf2 0x0f 0x76 - invalid */
4258
4259
4260/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4261FNIEMOP_DEF(iemOp_emms)
4262{
4263 IEMOP_MNEMONIC(emms, "emms");
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4265
4266 IEM_MC_BEGIN(0,0);
4267 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4268 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4269 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
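    /* Leaving MMX mode: EMMS tags all eight x87 registers as empty. */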
4270 IEM_MC_FPU_FROM_MMX_MODE();
4271 IEM_MC_ADVANCE_RIP();
4272 IEM_MC_END();
4273 return VINF_SUCCESS;
4274}
4275
4276/* Opcode 0x66 0x0f 0x77 - invalid */
4277/* Opcode 0xf3 0x0f 0x77 - invalid */
4278/* Opcode 0xf2 0x0f 0x77 - invalid */
4279
4280/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4281FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4282/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4283FNIEMOP_STUB(iemOp_AmdGrp17);
4284/* Opcode 0xf3 0x0f 0x78 - invalid */
4285/* Opcode 0xf2 0x0f 0x78 - invalid */
4286
4287/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4288#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4289FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4290{
4291 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4292 IEMOP_HLP_IN_VMX_OPERATION();
4293 IEMOP_HLP_VMX_INSTR();
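    /* Operand width is fixed: 64-bit in 64-bit mode, 32-bit elsewhere (the 66h size prefix is rejected during decoding). */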
4294 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4295
4296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4298 {
4299 /*
4300 * Register, register.
4301 */
4302 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4303 if (enmEffOpSize == IEMMODE_64BIT)
4304 {
4305 IEM_MC_BEGIN(2, 0);
4306 IEM_MC_ARG(uint64_t, u64Enc, 0);
4307 IEM_MC_ARG(uint64_t, u64Val, 1);
4308 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4309 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4310 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Enc, u64Val);
4311 IEM_MC_END();
4312 }
4313 else
4314 {
4315 IEM_MC_BEGIN(2, 0);
4316 IEM_MC_ARG(uint32_t, u32Enc, 0);
4317 IEM_MC_ARG(uint32_t, u32Val, 1);
4318 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4319 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4320 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Enc, u32Val);
4321 IEM_MC_END();
4322 }
4323 }
4324 else
4325 {
4326 /*
4327 * Register, memory.
4328 */
4329 if (enmEffOpSize == IEMMODE_64BIT)
4330 {
4331 IEM_MC_BEGIN(2, 0);
4332 IEM_MC_ARG(uint64_t, u64Enc, 0);
4333 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4335 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4336 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4337 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_mem, u64Enc, GCPtrVal);
4338 IEM_MC_END();
4339 }
4340 else
4341 {
4342 IEM_MC_BEGIN(2, 0);
4343 IEM_MC_ARG(uint32_t, u32Enc, 0);
4344 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4346 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4347 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4348 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_mem, u32Enc, GCPtrVal);
4349 IEM_MC_END();
4350 }
4351 }
4352 return VINF_SUCCESS;
4353}
4354#else
4355FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4356#endif
4357/* Opcode 0x66 0x0f 0x79 - invalid */
4358/* Opcode 0xf3 0x0f 0x79 - invalid */
4359/* Opcode 0xf2 0x0f 0x79 - invalid */
4360
4361/* Opcode 0x0f 0x7a - invalid */
4362/* Opcode 0x66 0x0f 0x7a - invalid */
4363/* Opcode 0xf3 0x0f 0x7a - invalid */
4364/* Opcode 0xf2 0x0f 0x7a - invalid */
4365
4366/* Opcode 0x0f 0x7b - invalid */
4367/* Opcode 0x66 0x0f 0x7b - invalid */
4368/* Opcode 0xf3 0x0f 0x7b - invalid */
4369/* Opcode 0xf2 0x0f 0x7b - invalid */
4370
4371/* Opcode 0x0f 0x7c - invalid */
4372/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4373FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4374/* Opcode 0xf3 0x0f 0x7c - invalid */
4375/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4376FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4377
4378/* Opcode 0x0f 0x7d - invalid */
4379/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4380FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4381/* Opcode 0xf3 0x0f 0x7d - invalid */
4382/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4383FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4384
4385
4386/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4387FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4388{
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4391 {
4392 /**
4393 * @opcode 0x7e
4394 * @opcodesub rex.w=1
4395 * @oppfx none
4396 * @opcpuid mmx
4397 * @opgroup og_mmx_datamove
4398 * @opxcpttype 5
4399 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4400 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4401 */
4402 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4403 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4404 {
4405 /* greg64, MMX */
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407 IEM_MC_BEGIN(0, 1);
4408 IEM_MC_LOCAL(uint64_t, u64Tmp);
4409
4410 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4411 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4412
4413 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4414 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4415 IEM_MC_FPU_TO_MMX_MODE();
4416
4417 IEM_MC_ADVANCE_RIP();
4418 IEM_MC_END();
4419 }
4420 else
4421 {
4422 /* [mem64], MMX */
4423 IEM_MC_BEGIN(0, 2);
4424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4425 IEM_MC_LOCAL(uint64_t, u64Tmp);
4426
4427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4429 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4430 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4431
4432 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4433 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4434 IEM_MC_FPU_TO_MMX_MODE();
4435
4436 IEM_MC_ADVANCE_RIP();
4437 IEM_MC_END();
4438 }
4439 }
4440 else
4441 {
4442 /**
4443 * @opdone
4444 * @opcode 0x7e
4445 * @opcodesub rex.w=0
4446 * @oppfx none
4447 * @opcpuid mmx
4448 * @opgroup og_mmx_datamove
4449 * @opxcpttype 5
4450 * @opfunction iemOp_movd_q_Ey_Pd
4451 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4452 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4453 */
4454 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4456 {
4457 /* greg32, MMX */
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_BEGIN(0, 1);
4460 IEM_MC_LOCAL(uint32_t, u32Tmp);
4461
4462 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4463 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4464
4465 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4466 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4467 IEM_MC_FPU_TO_MMX_MODE();
4468
4469 IEM_MC_ADVANCE_RIP();
4470 IEM_MC_END();
4471 }
4472 else
4473 {
4474 /* [mem32], MMX */
4475 IEM_MC_BEGIN(0, 2);
4476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4477 IEM_MC_LOCAL(uint32_t, u32Tmp);
4478
4479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4482 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4483
4484 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4485 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4486 IEM_MC_FPU_TO_MMX_MODE();
4487
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 }
4492 return VINF_SUCCESS;
4494}
4495
4496
4497FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4498{
4499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4501 {
4502 /**
4503 * @opcode 0x7e
4504 * @opcodesub rex.w=1
4505 * @oppfx 0x66
4506 * @opcpuid sse2
4507 * @opgroup og_sse2_simdint_datamove
4508 * @opxcpttype 5
4509 * @optest 64-bit / op1=1 op2=2 -> op1=2
4510 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4511 */
4512 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4514 {
4515 /* greg64, XMM */
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517 IEM_MC_BEGIN(0, 1);
4518 IEM_MC_LOCAL(uint64_t, u64Tmp);
4519
4520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4522
4523 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4524 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4525
4526 IEM_MC_ADVANCE_RIP();
4527 IEM_MC_END();
4528 }
4529 else
4530 {
4531 /* [mem64], XMM */
4532 IEM_MC_BEGIN(0, 2);
4533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4534 IEM_MC_LOCAL(uint64_t, u64Tmp);
4535
4536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4538 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4539 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4540
4541 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4542 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4543
4544 IEM_MC_ADVANCE_RIP();
4545 IEM_MC_END();
4546 }
4547 }
4548 else
4549 {
4550 /**
4551 * @opdone
4552 * @opcode 0x7e
4553 * @opcodesub rex.w=0
4554 * @oppfx 0x66
4555 * @opcpuid sse2
4556 * @opgroup og_sse2_simdint_datamove
4557 * @opxcpttype 5
4558 * @opfunction iemOp_movd_q_Ey_Vy
4559 * @optest op1=1 op2=2 -> op1=2
4560 * @optest op1=0 op2=-42 -> op1=-42
4561 */
4562 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4564 {
4565 /* greg32, XMM */
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4567 IEM_MC_BEGIN(0, 1);
4568 IEM_MC_LOCAL(uint32_t, u32Tmp);
4569
4570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4572
4573 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4574 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4575
4576 IEM_MC_ADVANCE_RIP();
4577 IEM_MC_END();
4578 }
4579 else
4580 {
4581 /* [mem32], XMM */
4582 IEM_MC_BEGIN(0, 2);
4583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4584 IEM_MC_LOCAL(uint32_t, u32Tmp);
4585
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4590
4591 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4592 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4593
4594 IEM_MC_ADVANCE_RIP();
4595 IEM_MC_END();
4596 }
4597 }
4598 return VINF_SUCCESS;
4600}
4601
4602/**
4603 * @opcode 0x7e
4604 * @oppfx 0xf3
4605 * @opcpuid sse2
4606 * @opgroup og_sse2_pcksclr_datamove
4607 * @opxcpttype none
4608 * @optest op1=1 op2=2 -> op1=2
4609 * @optest op1=0 op2=-42 -> op1=-42
4610 */
4611FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4612{
4613 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4615 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4616 {
4617 /*
4618 * Register, register.
4619 */
4620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4621 IEM_MC_BEGIN(0, 2);
4622 IEM_MC_LOCAL(uint64_t, uSrc);
4623
4624 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4625 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4626
4627 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4628 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4629
4630 IEM_MC_ADVANCE_RIP();
4631 IEM_MC_END();
4632 }
4633 else
4634 {
4635 /*
4636 * Register, memory.
4637 */
4638 IEM_MC_BEGIN(0, 2);
4639 IEM_MC_LOCAL(uint64_t, uSrc);
4640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4641
4642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4644 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4645 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4646
4647 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4648 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4649
4650 IEM_MC_ADVANCE_RIP();
4651 IEM_MC_END();
4652 }
4653 return VINF_SUCCESS;
4654}
4655
4656/* Opcode 0xf2 0x0f 0x7e - invalid */
4657
4658
4659/** Opcode 0x0f 0x7f - movq Qq, Pq */
4660FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4661{
4662 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4665 {
4666 /*
4667 * Register, register.
4668 */
4669 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4670 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_BEGIN(0, 1);
4673 IEM_MC_LOCAL(uint64_t, u64Tmp);
4674 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4675 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4676 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4677 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4678 IEM_MC_ADVANCE_RIP();
4679 IEM_MC_END();
4680 }
4681 else
4682 {
4683 /*
4684 * Memory, register.
4685 */
4686 IEM_MC_BEGIN(0, 2);
4687 IEM_MC_LOCAL(uint64_t, u64Tmp);
4688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4689
4690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4693 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4694
4695 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4696 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4697
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 return VINF_SUCCESS;
4702}
4703
4704/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4705FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4706{
4707 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4710 {
4711 /*
4712 * Register, register.
4713 */
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715 IEM_MC_BEGIN(0, 0);
4716 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4718 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4719 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4720 IEM_MC_ADVANCE_RIP();
4721 IEM_MC_END();
4722 }
4723 else
4724 {
4725 /*
4726 * Memory, register.
4727 */
4728 IEM_MC_BEGIN(0, 2);
4729 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4731
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4735 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4736
4737 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4738 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4739
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 return VINF_SUCCESS;
4744}
4745
4746/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4747FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4748{
4749 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4752 {
4753 /*
4754 * Register, register.
4755 */
4756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4757 IEM_MC_BEGIN(0, 0);
4758 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4759 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4760 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4761 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4762 IEM_MC_ADVANCE_RIP();
4763 IEM_MC_END();
4764 }
4765 else
4766 {
4767 /*
4768 * Memory, register.
4769 */
4770 IEM_MC_BEGIN(0, 2);
4771 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4773
4774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4777 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4778
4779 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4780 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4781
4782 IEM_MC_ADVANCE_RIP();
4783 IEM_MC_END();
4784 }
4785 return VINF_SUCCESS;
4786}
4787
4788/* Opcode 0xf2 0x0f 0x7f - invalid */
4789
4790
4791
4792/** Opcode 0x0f 0x80. */
4793FNIEMOP_DEF(iemOp_jo_Jv)
4794{
4795 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4796 IEMOP_HLP_MIN_386();
4797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
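    /* In 64-bit mode the operand size defaults to 64 bits, so the 32-bit displacement path below covers it (sign-extended into RIP). */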
4798 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4799 {
4800 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802
4803 IEM_MC_BEGIN(0, 0);
4804 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4805 IEM_MC_REL_JMP_S16(i16Imm);
4806 } IEM_MC_ELSE() {
4807 IEM_MC_ADVANCE_RIP();
4808 } IEM_MC_ENDIF();
4809 IEM_MC_END();
4810 }
4811 else
4812 {
4813 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815
4816 IEM_MC_BEGIN(0, 0);
4817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4818 IEM_MC_REL_JMP_S32(i32Imm);
4819 } IEM_MC_ELSE() {
4820 IEM_MC_ADVANCE_RIP();
4821 } IEM_MC_ENDIF();
4822 IEM_MC_END();
4823 }
4824 return VINF_SUCCESS;
4825}
4826
4827
4828/** Opcode 0x0f 0x81. */
4829FNIEMOP_DEF(iemOp_jno_Jv)
4830{
4831 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4832 IEMOP_HLP_MIN_386();
4833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4834 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4835 {
4836 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838
4839 IEM_MC_BEGIN(0, 0);
4840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4841 IEM_MC_ADVANCE_RIP();
4842 } IEM_MC_ELSE() {
4843 IEM_MC_REL_JMP_S16(i16Imm);
4844 } IEM_MC_ENDIF();
4845 IEM_MC_END();
4846 }
4847 else
4848 {
4849 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851
4852 IEM_MC_BEGIN(0, 0);
4853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4854 IEM_MC_ADVANCE_RIP();
4855 } IEM_MC_ELSE() {
4856 IEM_MC_REL_JMP_S32(i32Imm);
4857 } IEM_MC_ENDIF();
4858 IEM_MC_END();
4859 }
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/** Opcode 0x0f 0x82. */
4865FNIEMOP_DEF(iemOp_jc_Jv)
4866{
4867 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4868 IEMOP_HLP_MIN_386();
4869 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4870 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4871 {
4872 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4874
4875 IEM_MC_BEGIN(0, 0);
4876 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4877 IEM_MC_REL_JMP_S16(i16Imm);
4878 } IEM_MC_ELSE() {
4879 IEM_MC_ADVANCE_RIP();
4880 } IEM_MC_ENDIF();
4881 IEM_MC_END();
4882 }
4883 else
4884 {
4885 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4890 IEM_MC_REL_JMP_S32(i32Imm);
4891 } IEM_MC_ELSE() {
4892 IEM_MC_ADVANCE_RIP();
4893 } IEM_MC_ENDIF();
4894 IEM_MC_END();
4895 }
4896 return VINF_SUCCESS;
4897}
4898
4899
4900/** Opcode 0x0f 0x83. */
4901FNIEMOP_DEF(iemOp_jnc_Jv)
4902{
4903 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4904 IEMOP_HLP_MIN_386();
4905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4906 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4907 {
4908 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4910
4911 IEM_MC_BEGIN(0, 0);
4912 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4913 IEM_MC_ADVANCE_RIP();
4914 } IEM_MC_ELSE() {
4915 IEM_MC_REL_JMP_S16(i16Imm);
4916 } IEM_MC_ENDIF();
4917 IEM_MC_END();
4918 }
4919 else
4920 {
4921 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4923
4924 IEM_MC_BEGIN(0, 0);
4925 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4926 IEM_MC_ADVANCE_RIP();
4927 } IEM_MC_ELSE() {
4928 IEM_MC_REL_JMP_S32(i32Imm);
4929 } IEM_MC_ENDIF();
4930 IEM_MC_END();
4931 }
4932 return VINF_SUCCESS;
4933}
4934
4935
4936/** Opcode 0x0f 0x84. */
4937FNIEMOP_DEF(iemOp_je_Jv)
4938{
4939 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4940 IEMOP_HLP_MIN_386();
4941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4943 {
4944 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946
4947 IEM_MC_BEGIN(0, 0);
4948 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4949 IEM_MC_REL_JMP_S16(i16Imm);
4950 } IEM_MC_ELSE() {
4951 IEM_MC_ADVANCE_RIP();
4952 } IEM_MC_ENDIF();
4953 IEM_MC_END();
4954 }
4955 else
4956 {
4957 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4959
4960 IEM_MC_BEGIN(0, 0);
4961 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4962 IEM_MC_REL_JMP_S32(i32Imm);
4963 } IEM_MC_ELSE() {
4964 IEM_MC_ADVANCE_RIP();
4965 } IEM_MC_ENDIF();
4966 IEM_MC_END();
4967 }
4968 return VINF_SUCCESS;
4969}
4970
4971
4972/** Opcode 0x0f 0x85. */
4973FNIEMOP_DEF(iemOp_jne_Jv)
4974{
4975 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4976 IEMOP_HLP_MIN_386();
4977 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4978 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4979 {
4980 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982
4983 IEM_MC_BEGIN(0, 0);
4984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4985 IEM_MC_ADVANCE_RIP();
4986 } IEM_MC_ELSE() {
4987 IEM_MC_REL_JMP_S16(i16Imm);
4988 } IEM_MC_ENDIF();
4989 IEM_MC_END();
4990 }
4991 else
4992 {
4993 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995
4996 IEM_MC_BEGIN(0, 0);
4997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4998 IEM_MC_ADVANCE_RIP();
4999 } IEM_MC_ELSE() {
5000 IEM_MC_REL_JMP_S32(i32Imm);
5001 } IEM_MC_ENDIF();
5002 IEM_MC_END();
5003 }
5004 return VINF_SUCCESS;
5005}
5006
5007
5008/** Opcode 0x0f 0x86. */
5009FNIEMOP_DEF(iemOp_jbe_Jv)
5010{
5011 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5012 IEMOP_HLP_MIN_386();
5013 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5014 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5015 {
5016 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5018
5019 IEM_MC_BEGIN(0, 0);
5020 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5021 IEM_MC_REL_JMP_S16(i16Imm);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_ADVANCE_RIP();
5024 } IEM_MC_ENDIF();
5025 IEM_MC_END();
5026 }
5027 else
5028 {
5029 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5031
5032 IEM_MC_BEGIN(0, 0);
5033 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5034 IEM_MC_REL_JMP_S32(i32Imm);
5035 } IEM_MC_ELSE() {
5036 IEM_MC_ADVANCE_RIP();
5037 } IEM_MC_ENDIF();
5038 IEM_MC_END();
5039 }
5040 return VINF_SUCCESS;
5041}
5042
5043
5044/** Opcode 0x0f 0x87. */
5045FNIEMOP_DEF(iemOp_jnbe_Jv)
5046{
5047 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5048 IEMOP_HLP_MIN_386();
5049 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5050 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5051 {
5052 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5054
5055 IEM_MC_BEGIN(0, 0);
5056 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5057 IEM_MC_ADVANCE_RIP();
5058 } IEM_MC_ELSE() {
5059 IEM_MC_REL_JMP_S16(i16Imm);
5060 } IEM_MC_ENDIF();
5061 IEM_MC_END();
5062 }
5063 else
5064 {
5065 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067
5068 IEM_MC_BEGIN(0, 0);
5069 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5070 IEM_MC_ADVANCE_RIP();
5071 } IEM_MC_ELSE() {
5072 IEM_MC_REL_JMP_S32(i32Imm);
5073 } IEM_MC_ENDIF();
5074 IEM_MC_END();
5075 }
5076 return VINF_SUCCESS;
5077}
5078
5079
5080/** Opcode 0x0f 0x88. */
5081FNIEMOP_DEF(iemOp_js_Jv)
5082{
5083 IEMOP_MNEMONIC(js_Jv, "js Jv");
5084 IEMOP_HLP_MIN_386();
5085 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5086 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5087 {
5088 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5090
5091 IEM_MC_BEGIN(0, 0);
5092 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5093 IEM_MC_REL_JMP_S16(i16Imm);
5094 } IEM_MC_ELSE() {
5095 IEM_MC_ADVANCE_RIP();
5096 } IEM_MC_ENDIF();
5097 IEM_MC_END();
5098 }
5099 else
5100 {
5101 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103
5104 IEM_MC_BEGIN(0, 0);
5105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5106 IEM_MC_REL_JMP_S32(i32Imm);
5107 } IEM_MC_ELSE() {
5108 IEM_MC_ADVANCE_RIP();
5109 } IEM_MC_ENDIF();
5110 IEM_MC_END();
5111 }
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** Opcode 0x0f 0x89. */
5117FNIEMOP_DEF(iemOp_jns_Jv)
5118{
5119 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5120 IEMOP_HLP_MIN_386();
5121 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5122 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5123 {
5124 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5126
5127 IEM_MC_BEGIN(0, 0);
5128 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5129 IEM_MC_ADVANCE_RIP();
5130 } IEM_MC_ELSE() {
5131 IEM_MC_REL_JMP_S16(i16Imm);
5132 } IEM_MC_ENDIF();
5133 IEM_MC_END();
5134 }
5135 else
5136 {
5137 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139
5140 IEM_MC_BEGIN(0, 0);
5141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5142 IEM_MC_ADVANCE_RIP();
5143 } IEM_MC_ELSE() {
5144 IEM_MC_REL_JMP_S32(i32Imm);
5145 } IEM_MC_ENDIF();
5146 IEM_MC_END();
5147 }
5148 return VINF_SUCCESS;
5149}
5150
5151
5152/** Opcode 0x0f 0x8a. */
5153FNIEMOP_DEF(iemOp_jp_Jv)
5154{
5155 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5156 IEMOP_HLP_MIN_386();
5157 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5158 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5159 {
5160 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162
5163 IEM_MC_BEGIN(0, 0);
5164 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5165 IEM_MC_REL_JMP_S16(i16Imm);
5166 } IEM_MC_ELSE() {
5167 IEM_MC_ADVANCE_RIP();
5168 } IEM_MC_ENDIF();
5169 IEM_MC_END();
5170 }
5171 else
5172 {
5173 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175
5176 IEM_MC_BEGIN(0, 0);
5177 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5178 IEM_MC_REL_JMP_S32(i32Imm);
5179 } IEM_MC_ELSE() {
5180 IEM_MC_ADVANCE_RIP();
5181 } IEM_MC_ENDIF();
5182 IEM_MC_END();
5183 }
5184 return VINF_SUCCESS;
5185}
5186
5187
5188/** Opcode 0x0f 0x8b. */
5189FNIEMOP_DEF(iemOp_jnp_Jv)
5190{
5191 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5192 IEMOP_HLP_MIN_386();
5193 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5194 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5195 {
5196 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5198
5199 IEM_MC_BEGIN(0, 0);
5200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5201 IEM_MC_ADVANCE_RIP();
5202 } IEM_MC_ELSE() {
5203 IEM_MC_REL_JMP_S16(i16Imm);
5204 } IEM_MC_ENDIF();
5205 IEM_MC_END();
5206 }
5207 else
5208 {
5209 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211
5212 IEM_MC_BEGIN(0, 0);
5213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5214 IEM_MC_ADVANCE_RIP();
5215 } IEM_MC_ELSE() {
5216 IEM_MC_REL_JMP_S32(i32Imm);
5217 } IEM_MC_ENDIF();
5218 IEM_MC_END();
5219 }
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** Opcode 0x0f 0x8c. */
5225FNIEMOP_DEF(iemOp_jl_Jv)
5226{
5227 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5228 IEMOP_HLP_MIN_386();
5229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5230 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5231 {
5232 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5234
5235 IEM_MC_BEGIN(0, 0);
5236 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5237 IEM_MC_REL_JMP_S16(i16Imm);
5238 } IEM_MC_ELSE() {
5239 IEM_MC_ADVANCE_RIP();
5240 } IEM_MC_ENDIF();
5241 IEM_MC_END();
5242 }
5243 else
5244 {
5245 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5247
5248 IEM_MC_BEGIN(0, 0);
5249 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5250 IEM_MC_REL_JMP_S32(i32Imm);
5251 } IEM_MC_ELSE() {
5252 IEM_MC_ADVANCE_RIP();
5253 } IEM_MC_ENDIF();
5254 IEM_MC_END();
5255 }
5256 return VINF_SUCCESS;
5257}
5258
5259
5260/** Opcode 0x0f 0x8d. */
5261FNIEMOP_DEF(iemOp_jnl_Jv)
5262{
5263 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5264 IEMOP_HLP_MIN_386();
5265 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5266 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5267 {
5268 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270
5271 IEM_MC_BEGIN(0, 0);
5272 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5273 IEM_MC_ADVANCE_RIP();
5274 } IEM_MC_ELSE() {
5275 IEM_MC_REL_JMP_S16(i16Imm);
5276 } IEM_MC_ENDIF();
5277 IEM_MC_END();
5278 }
5279 else
5280 {
5281 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283
5284 IEM_MC_BEGIN(0, 0);
5285 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5286 IEM_MC_ADVANCE_RIP();
5287 } IEM_MC_ELSE() {
5288 IEM_MC_REL_JMP_S32(i32Imm);
5289 } IEM_MC_ENDIF();
5290 IEM_MC_END();
5291 }
5292 return VINF_SUCCESS;
5293}
5294
5295
5296/** Opcode 0x0f 0x8e. */
5297FNIEMOP_DEF(iemOp_jle_Jv)
5298{
5299 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5300 IEMOP_HLP_MIN_386();
5301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5302 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5303 {
5304 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5306
5307 IEM_MC_BEGIN(0, 0);
5308 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5309 IEM_MC_REL_JMP_S16(i16Imm);
5310 } IEM_MC_ELSE() {
5311 IEM_MC_ADVANCE_RIP();
5312 } IEM_MC_ENDIF();
5313 IEM_MC_END();
5314 }
5315 else
5316 {
5317 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5319
5320 IEM_MC_BEGIN(0, 0);
5321 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5322 IEM_MC_REL_JMP_S32(i32Imm);
5323 } IEM_MC_ELSE() {
5324 IEM_MC_ADVANCE_RIP();
5325 } IEM_MC_ENDIF();
5326 IEM_MC_END();
5327 }
5328 return VINF_SUCCESS;
5329}
5330
5331
5332/** Opcode 0x0f 0x8f. */
5333FNIEMOP_DEF(iemOp_jnle_Jv)
5334{
5335 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5336 IEMOP_HLP_MIN_386();
5337 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5338 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5339 {
5340 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342
5343 IEM_MC_BEGIN(0, 0);
5344 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5345 IEM_MC_ADVANCE_RIP();
5346 } IEM_MC_ELSE() {
5347 IEM_MC_REL_JMP_S16(i16Imm);
5348 } IEM_MC_ENDIF();
5349 IEM_MC_END();
5350 }
5351 else
5352 {
5353 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355
5356 IEM_MC_BEGIN(0, 0);
5357 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5358 IEM_MC_ADVANCE_RIP();
5359 } IEM_MC_ELSE() {
5360 IEM_MC_REL_JMP_S32(i32Imm);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_END();
5363 }
5364 return VINF_SUCCESS;
5365}
5366
5367
5368/** Opcode 0x0f 0x90. */
5369FNIEMOP_DEF(iemOp_seto_Eb)
5370{
5371 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5372 IEMOP_HLP_MIN_386();
5373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5374
5375 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5376 * any way. AMD says it's "unused", whatever that means. We're
5377 * ignoring for now. */
5378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5379 {
5380 /* register target */
5381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5382 IEM_MC_BEGIN(0, 0);
5383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5384 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5385 } IEM_MC_ELSE() {
5386 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5387 } IEM_MC_ENDIF();
5388 IEM_MC_ADVANCE_RIP();
5389 IEM_MC_END();
5390 }
5391 else
5392 {
5393 /* memory target */
5394 IEM_MC_BEGIN(0, 1);
5395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5399 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5400 } IEM_MC_ELSE() {
5401 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5402 } IEM_MC_ENDIF();
5403 IEM_MC_ADVANCE_RIP();
5404 IEM_MC_END();
5405 }
5406 return VINF_SUCCESS;
5407}
5408
5409
5410/** Opcode 0x0f 0x91. */
5411FNIEMOP_DEF(iemOp_setno_Eb)
5412{
5413 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5414 IEMOP_HLP_MIN_386();
5415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5416
5417 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5418 * any way. AMD says it's "unused", whatever that means. We're
5419 * ignoring for now. */
5420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5421 {
5422 /* register target */
5423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5424 IEM_MC_BEGIN(0, 0);
5425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5426 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5427 } IEM_MC_ELSE() {
5428 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5429 } IEM_MC_ENDIF();
5430 IEM_MC_ADVANCE_RIP();
5431 IEM_MC_END();
5432 }
5433 else
5434 {
5435 /* memory target */
5436 IEM_MC_BEGIN(0, 1);
5437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5442 } IEM_MC_ELSE() {
5443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5444 } IEM_MC_ENDIF();
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 }
5448 return VINF_SUCCESS;
5449}
5450
5451
5452/** Opcode 0x0f 0x92. */
5453FNIEMOP_DEF(iemOp_setc_Eb)
5454{
5455 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5456 IEMOP_HLP_MIN_386();
5457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5458
5459 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5460 * any way. AMD says it's "unused", whatever that means. We're
5461 * ignoring for now. */
5462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5463 {
5464 /* register target */
5465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5466 IEM_MC_BEGIN(0, 0);
5467 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5469 } IEM_MC_ELSE() {
5470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5471 } IEM_MC_ENDIF();
5472 IEM_MC_ADVANCE_RIP();
5473 IEM_MC_END();
5474 }
5475 else
5476 {
5477 /* memory target */
5478 IEM_MC_BEGIN(0, 1);
5479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5484 } IEM_MC_ELSE() {
5485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5486 } IEM_MC_ENDIF();
5487 IEM_MC_ADVANCE_RIP();
5488 IEM_MC_END();
5489 }
5490 return VINF_SUCCESS;
5491}
5492
5493
5494/** Opcode 0x0f 0x93. */
5495FNIEMOP_DEF(iemOp_setnc_Eb)
5496{
5497 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5498 IEMOP_HLP_MIN_386();
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500
5501 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5502 * any way. AMD says it's "unused", whatever that means. We're
5503 * ignoring for now. */
5504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5505 {
5506 /* register target */
5507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5508 IEM_MC_BEGIN(0, 0);
5509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5510 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5511 } IEM_MC_ELSE() {
5512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5513 } IEM_MC_ENDIF();
5514 IEM_MC_ADVANCE_RIP();
5515 IEM_MC_END();
5516 }
5517 else
5518 {
5519 /* memory target */
5520 IEM_MC_BEGIN(0, 1);
5521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5526 } IEM_MC_ELSE() {
5527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 }
5532 return VINF_SUCCESS;
5533}
5534
5535
5536/** Opcode 0x0f 0x94. */
5537FNIEMOP_DEF(iemOp_sete_Eb)
5538{
5539 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5540 IEMOP_HLP_MIN_386();
5541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5542
5543 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5544 * any way. AMD says it's "unused", whatever that means. We're
5545 * ignoring for now. */
5546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5547 {
5548 /* register target */
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 IEM_MC_BEGIN(0, 0);
5551 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5552 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5553 } IEM_MC_ELSE() {
5554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5555 } IEM_MC_ENDIF();
5556 IEM_MC_ADVANCE_RIP();
5557 IEM_MC_END();
5558 }
5559 else
5560 {
5561 /* memory target */
5562 IEM_MC_BEGIN(0, 1);
5563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5568 } IEM_MC_ELSE() {
5569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5570 } IEM_MC_ENDIF();
5571 IEM_MC_ADVANCE_RIP();
5572 IEM_MC_END();
5573 }
5574 return VINF_SUCCESS;
5575}
5576
5577
5578/** Opcode 0x0f 0x95. */
5579FNIEMOP_DEF(iemOp_setne_Eb)
5580{
5581 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5582 IEMOP_HLP_MIN_386();
5583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5584
5585 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5586 * any way. AMD says it's "unused", whatever that means. We're
5587 * ignoring for now. */
5588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5589 {
5590 /* register target */
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 IEM_MC_BEGIN(0, 0);
5593 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5594 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5595 } IEM_MC_ELSE() {
5596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5597 } IEM_MC_ENDIF();
5598 IEM_MC_ADVANCE_RIP();
5599 IEM_MC_END();
5600 }
5601 else
5602 {
5603 /* memory target */
5604 IEM_MC_BEGIN(0, 1);
5605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5610 } IEM_MC_ELSE() {
5611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5612 } IEM_MC_ENDIF();
5613 IEM_MC_ADVANCE_RIP();
5614 IEM_MC_END();
5615 }
5616 return VINF_SUCCESS;
5617}
5618
5619
5620/** Opcode 0x0f 0x96. */
5621FNIEMOP_DEF(iemOp_setbe_Eb)
5622{
5623 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5624 IEMOP_HLP_MIN_386();
5625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5626
5627 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5628 * any way. AMD says it's "unused", whatever that means. We're
5629 * ignoring for now. */
5630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5631 {
5632 /* register target */
5633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5634 IEM_MC_BEGIN(0, 0);
5635 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5636 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5637 } IEM_MC_ELSE() {
5638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5639 } IEM_MC_ENDIF();
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 }
5643 else
5644 {
5645 /* memory target */
5646 IEM_MC_BEGIN(0, 1);
5647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5650 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5652 } IEM_MC_ELSE() {
5653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5654 } IEM_MC_ENDIF();
5655 IEM_MC_ADVANCE_RIP();
5656 IEM_MC_END();
5657 }
5658 return VINF_SUCCESS;
5659}
5660
5661
5662/** Opcode 0x0f 0x97. */
5663FNIEMOP_DEF(iemOp_setnbe_Eb)
5664{
5665 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5666 IEMOP_HLP_MIN_386();
5667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5668
5669 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5670 * any way. AMD says it's "unused", whatever that means. We're
5671 * ignoring for now. */
5672 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5673 {
5674 /* register target */
5675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5676 IEM_MC_BEGIN(0, 0);
5677 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5678 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5679 } IEM_MC_ELSE() {
5680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5681 } IEM_MC_ENDIF();
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 }
5685 else
5686 {
5687 /* memory target */
5688 IEM_MC_BEGIN(0, 1);
5689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5693 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5694 } IEM_MC_ELSE() {
5695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5696 } IEM_MC_ENDIF();
5697 IEM_MC_ADVANCE_RIP();
5698 IEM_MC_END();
5699 }
5700 return VINF_SUCCESS;
5701}
5702
5703
5704/** Opcode 0x0f 0x98. */
5705FNIEMOP_DEF(iemOp_sets_Eb)
5706{
5707 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5708 IEMOP_HLP_MIN_386();
5709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5710
5711 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5712 * any way. AMD says it's "unused", whatever that means. We're
5713 * ignoring for now. */
5714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5715 {
5716 /* register target */
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_BEGIN(0, 0);
5719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5720 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5721 } IEM_MC_ELSE() {
5722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5723 } IEM_MC_ENDIF();
5724 IEM_MC_ADVANCE_RIP();
5725 IEM_MC_END();
5726 }
5727 else
5728 {
5729 /* memory target */
5730 IEM_MC_BEGIN(0, 1);
5731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5736 } IEM_MC_ELSE() {
5737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5738 } IEM_MC_ENDIF();
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 }
5742 return VINF_SUCCESS;
5743}
5744
5745
5746/** Opcode 0x0f 0x99. */
5747FNIEMOP_DEF(iemOp_setns_Eb)
5748{
5749 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5750 IEMOP_HLP_MIN_386();
5751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5752
5753 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5754 * any way. AMD says it's "unused", whatever that means. We're
5755 * ignoring for now. */
5756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5757 {
5758 /* register target */
5759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5760 IEM_MC_BEGIN(0, 0);
5761 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5762 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5763 } IEM_MC_ELSE() {
5764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5765 } IEM_MC_ENDIF();
5766 IEM_MC_ADVANCE_RIP();
5767 IEM_MC_END();
5768 }
5769 else
5770 {
5771 /* memory target */
5772 IEM_MC_BEGIN(0, 1);
5773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5777 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5778 } IEM_MC_ELSE() {
5779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5780 } IEM_MC_ENDIF();
5781 IEM_MC_ADVANCE_RIP();
5782 IEM_MC_END();
5783 }
5784 return VINF_SUCCESS;
5785}
5786
5787
5788/** Opcode 0x0f 0x9a. */
5789FNIEMOP_DEF(iemOp_setp_Eb)
5790{
5791 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5792 IEMOP_HLP_MIN_386();
5793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5794
5795 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5796 * any way. AMD says it's "unused", whatever that means. We're
5797 * ignoring for now. */
5798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5799 {
5800 /* register target */
5801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5802 IEM_MC_BEGIN(0, 0);
5803 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5804 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5805 } IEM_MC_ELSE() {
5806 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5807 } IEM_MC_ENDIF();
5808 IEM_MC_ADVANCE_RIP();
5809 IEM_MC_END();
5810 }
5811 else
5812 {
5813 /* memory target */
5814 IEM_MC_BEGIN(0, 1);
5815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5819 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5820 } IEM_MC_ELSE() {
5821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5822 } IEM_MC_ENDIF();
5823 IEM_MC_ADVANCE_RIP();
5824 IEM_MC_END();
5825 }
5826 return VINF_SUCCESS;
5827}
5828
5829
5830/** Opcode 0x0f 0x9b. */
5831FNIEMOP_DEF(iemOp_setnp_Eb)
5832{
5833 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5834 IEMOP_HLP_MIN_386();
5835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5836
5837 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5838 * any way. AMD says it's "unused", whatever that means. We're
5839 * ignoring for now. */
5840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5841 {
5842 /* register target */
5843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5844 IEM_MC_BEGIN(0, 0);
5845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5846 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5847 } IEM_MC_ELSE() {
5848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5849 } IEM_MC_ENDIF();
5850 IEM_MC_ADVANCE_RIP();
5851 IEM_MC_END();
5852 }
5853 else
5854 {
5855 /* memory target */
5856 IEM_MC_BEGIN(0, 1);
5857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5861 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5862 } IEM_MC_ELSE() {
5863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5864 } IEM_MC_ENDIF();
5865 IEM_MC_ADVANCE_RIP();
5866 IEM_MC_END();
5867 }
5868 return VINF_SUCCESS;
5869}
5870
5871
5872/** Opcode 0x0f 0x9c. */
5873FNIEMOP_DEF(iemOp_setl_Eb)
5874{
5875 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5876 IEMOP_HLP_MIN_386();
5877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5878
5879 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5880 * any way. AMD says it's "unused", whatever that means. We're
5881 * ignoring for now. */
5882 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5883 {
5884 /* register target */
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_BEGIN(0, 0);
5887 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5888 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5889 } IEM_MC_ELSE() {
5890 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5891 } IEM_MC_ENDIF();
5892 IEM_MC_ADVANCE_RIP();
5893 IEM_MC_END();
5894 }
5895 else
5896 {
5897 /* memory target */
5898 IEM_MC_BEGIN(0, 1);
5899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5903 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5904 } IEM_MC_ELSE() {
5905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5906 } IEM_MC_ENDIF();
5907 IEM_MC_ADVANCE_RIP();
5908 IEM_MC_END();
5909 }
5910 return VINF_SUCCESS;
5911}
5912
5913
5914/** Opcode 0x0f 0x9d. */
5915FNIEMOP_DEF(iemOp_setnl_Eb)
5916{
5917 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5918 IEMOP_HLP_MIN_386();
5919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5920
5921 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5922 * any way. AMD says it's "unused", whatever that means. We're
5923 * ignoring for now. */
5924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5925 {
5926 /* register target */
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5928 IEM_MC_BEGIN(0, 0);
5929 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5930 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5931 } IEM_MC_ELSE() {
5932 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5933 } IEM_MC_ENDIF();
5934 IEM_MC_ADVANCE_RIP();
5935 IEM_MC_END();
5936 }
5937 else
5938 {
5939 /* memory target */
5940 IEM_MC_BEGIN(0, 1);
5941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5944 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5945 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5946 } IEM_MC_ELSE() {
5947 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5948 } IEM_MC_ENDIF();
5949 IEM_MC_ADVANCE_RIP();
5950 IEM_MC_END();
5951 }
5952 return VINF_SUCCESS;
5953}
5954
5955
5956/** Opcode 0x0f 0x9e. */
5957FNIEMOP_DEF(iemOp_setle_Eb)
5958{
5959 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5960 IEMOP_HLP_MIN_386();
5961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5962
5963 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5964 * any way. AMD says it's "unused", whatever that means. We're
5965 * ignoring for now. */
5966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5967 {
5968 /* register target */
5969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5970 IEM_MC_BEGIN(0, 0);
5971 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5972 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5973 } IEM_MC_ELSE() {
5974 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5975 } IEM_MC_ENDIF();
5976 IEM_MC_ADVANCE_RIP();
5977 IEM_MC_END();
5978 }
5979 else
5980 {
5981 /* memory target */
5982 IEM_MC_BEGIN(0, 1);
5983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5986 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5987 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5988 } IEM_MC_ELSE() {
5989 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5990 } IEM_MC_ENDIF();
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 }
5994 return VINF_SUCCESS;
5995}
5996
5997
5998/** Opcode 0x0f 0x9f. */
5999FNIEMOP_DEF(iemOp_setnle_Eb)
6000{
6001 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6002 IEMOP_HLP_MIN_386();
6003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6004
6005 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6006 * any way. AMD says it's "unused", whatever that means. We're
6007 * ignoring for now. */
6008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6009 {
6010 /* register target */
6011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6012 IEM_MC_BEGIN(0, 0);
6013 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6014 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6015 } IEM_MC_ELSE() {
6016 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6017 } IEM_MC_ENDIF();
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 }
6021 else
6022 {
6023 /* memory target */
6024 IEM_MC_BEGIN(0, 1);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6029 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6030 } IEM_MC_ELSE() {
6031 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6032 } IEM_MC_ENDIF();
6033 IEM_MC_ADVANCE_RIP();
6034 IEM_MC_END();
6035 }
6036 return VINF_SUCCESS;
6037}
6038
6039
6040/**
6041 * Common 'push segment-register' helper.
6042 */
6043FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6044{
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* In 64-bit mode only FS and GS can be pushed (0x0f 0xa0/0xa8). */
6047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6048
6049 switch (pVCpu->iem.s.enmEffOpSize)
6050 {
6051 case IEMMODE_16BIT:
6052 IEM_MC_BEGIN(0, 1);
6053 IEM_MC_LOCAL(uint16_t, u16Value);
6054 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6055 IEM_MC_PUSH_U16(u16Value);
6056 IEM_MC_ADVANCE_RIP();
6057 IEM_MC_END();
6058 break;
6059
6060 case IEMMODE_32BIT:
6061 IEM_MC_BEGIN(0, 1);
6062 IEM_MC_LOCAL(uint32_t, u32Value);
6063 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6064 IEM_MC_PUSH_U32_SREG(u32Value);
6065 IEM_MC_ADVANCE_RIP();
6066 IEM_MC_END();
6067 break;
6068
6069 case IEMMODE_64BIT:
6070 IEM_MC_BEGIN(0, 1);
6071 IEM_MC_LOCAL(uint64_t, u64Value);
6072 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6073 IEM_MC_PUSH_U64(u64Value);
6074 IEM_MC_ADVANCE_RIP();
6075 IEM_MC_END();
6076 break;
6077 }
6078
6079 return VINF_SUCCESS;
6080}
6081
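/*
 * Note: the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than the
 * plain 32-bit push.  Real CPUs may write only the low 16 bits of the
 * 32-bit stack slot when pushing a segment register with a 32-bit
 * operand size, leaving the upper half untouched, and the dedicated MC
 * exists so the emulation can reproduce (or at least approximate) that
 * quirk.
 */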
6082
6083/** Opcode 0x0f 0xa0. */
6084FNIEMOP_DEF(iemOp_push_fs)
6085{
6086 IEMOP_MNEMONIC(push_fs, "push fs");
6087 IEMOP_HLP_MIN_386();
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6090}
6091
6092
6093/** Opcode 0x0f 0xa1. */
6094FNIEMOP_DEF(iemOp_pop_fs)
6095{
6096 IEMOP_MNEMONIC(pop_fs, "pop fs");
6097 IEMOP_HLP_MIN_386();
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6100}
6101
6102
6103/** Opcode 0x0f 0xa2. */
6104FNIEMOP_DEF(iemOp_cpuid)
6105{
6106 IEMOP_MNEMONIC(cpuid, "cpuid");
6107 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486 models. */
6108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6109 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6110}
6111
6112
6113/**
6114 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6115 * iemOp_bts_Ev_Gv.
6116 */
6117FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6118{
6119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6120 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6121
6122 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6123 {
6124 /* register destination. */
6125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6126 switch (pVCpu->iem.s.enmEffOpSize)
6127 {
6128 case IEMMODE_16BIT:
6129 IEM_MC_BEGIN(3, 0);
6130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6131 IEM_MC_ARG(uint16_t, u16Src, 1);
6132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6133
6134 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6135 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6136 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6137 IEM_MC_REF_EFLAGS(pEFlags);
6138 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6139
6140 IEM_MC_ADVANCE_RIP();
6141 IEM_MC_END();
6142 return VINF_SUCCESS;
6143
6144 case IEMMODE_32BIT:
6145 IEM_MC_BEGIN(3, 0);
6146 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6147 IEM_MC_ARG(uint32_t, u32Src, 1);
6148 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6149
6150 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6151 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6152 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6153 IEM_MC_REF_EFLAGS(pEFlags);
6154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6155
6156 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6157 IEM_MC_ADVANCE_RIP();
6158 IEM_MC_END();
6159 return VINF_SUCCESS;
6160
6161 case IEMMODE_64BIT:
6162 IEM_MC_BEGIN(3, 0);
6163 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6164 IEM_MC_ARG(uint64_t, u64Src, 1);
6165 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6166
6167 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6168 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6169 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6170 IEM_MC_REF_EFLAGS(pEFlags);
6171 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6172
6173 IEM_MC_ADVANCE_RIP();
6174 IEM_MC_END();
6175 return VINF_SUCCESS;
6176
6177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6178 }
6179 }
6180 else
6181 {
6182 /* memory destination. */
6183
6184 uint32_t fAccess;
6185 if (pImpl->pfnLockedU16)
6186 fAccess = IEM_ACCESS_DATA_RW;
6187 else /* BT */
6188 fAccess = IEM_ACCESS_DATA_R;
6189
6190 /** @todo test negative bit offsets! */
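 /*
  * For the memory forms the source register holds a *signed* bit offset
  * that may land outside the byte addressed by the r/m operand.  The
  * iNNAddrAdj sequence below splits it up: the low 4/5/6 bits (the AND)
  * select the bit within a 16/32/64-bit unit, while the remaining bits,
  * arithmetically shifted down and rescaled to bytes (the SAR + SHL),
  * are added to the effective address.  Worked 16-bit example: a bit
  * offset of -1 gives u16Src = 15 and i16AddrAdj = -2, i.e. bit 15 of
  * the word just below GCPtrEffDst.
  */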
6191 switch (pVCpu->iem.s.enmEffOpSize)
6192 {
6193 case IEMMODE_16BIT:
6194 IEM_MC_BEGIN(3, 2);
6195 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6196 IEM_MC_ARG(uint16_t, u16Src, 1);
6197 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6199 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6200
6201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6202 if (pImpl->pfnLockedU16)
6203 IEMOP_HLP_DONE_DECODING();
6204 else
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6207 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6208 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6209 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6210 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6211 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6212 IEM_MC_FETCH_EFLAGS(EFlags);
6213
6214 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6217 else
6218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6219 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6220
6221 IEM_MC_COMMIT_EFLAGS(EFlags);
6222 IEM_MC_ADVANCE_RIP();
6223 IEM_MC_END();
6224 return VINF_SUCCESS;
6225
6226 case IEMMODE_32BIT:
6227 IEM_MC_BEGIN(3, 2);
6228 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6229 IEM_MC_ARG(uint32_t, u32Src, 1);
6230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6232 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6233
6234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6235 if (pImpl->pfnLockedU16)
6236 IEMOP_HLP_DONE_DECODING();
6237 else
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6240 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6241 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6242 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6243 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6244 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6245 IEM_MC_FETCH_EFLAGS(EFlags);
6246
6247 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6249 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6250 else
6251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6252 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6253
6254 IEM_MC_COMMIT_EFLAGS(EFlags);
6255 IEM_MC_ADVANCE_RIP();
6256 IEM_MC_END();
6257 return VINF_SUCCESS;
6258
6259 case IEMMODE_64BIT:
6260 IEM_MC_BEGIN(3, 2);
6261 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6262 IEM_MC_ARG(uint64_t, u64Src, 1);
6263 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6265 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6266
6267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6268 if (pImpl->pfnLockedU16)
6269 IEMOP_HLP_DONE_DECODING();
6270 else
6271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6272 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6273 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6274 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6275 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6276 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6277 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6278 IEM_MC_FETCH_EFLAGS(EFlags);
6279
6280 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6283 else
6284 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6285 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6286
6287 IEM_MC_COMMIT_EFLAGS(EFlags);
6288 IEM_MC_ADVANCE_RIP();
6289 IEM_MC_END();
6290 return VINF_SUCCESS;
6291
6292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6293 }
6294 }
6295}
6296
6297
6298/** Opcode 0x0f 0xa3. */
6299FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6300{
6301 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6302 IEMOP_HLP_MIN_386();
6303 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6304}
6305
6306
6307/**
6308 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
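 *
 * SHLD shifts the destination left, filling from the top bits of the
 * source; SHRD shifts it right, filling from the low bits.  E.g. with
 * AX=0x1234 and BX=0xABCD, 'shld ax, bx, 4' yields AX=0x234A and
 * 'shrd ax, bx, 4' yields AX=0xD123.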
6309 */
6310FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6311{
6312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6313 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6314
6315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6316 {
6317 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319
6320 switch (pVCpu->iem.s.enmEffOpSize)
6321 {
6322 case IEMMODE_16BIT:
6323 IEM_MC_BEGIN(4, 0);
6324 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6325 IEM_MC_ARG(uint16_t, u16Src, 1);
6326 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6327 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6328
6329 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6330 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6331 IEM_MC_REF_EFLAGS(pEFlags);
6332 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6333
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 case IEMMODE_32BIT:
6339 IEM_MC_BEGIN(4, 0);
6340 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6341 IEM_MC_ARG(uint32_t, u32Src, 1);
6342 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6343 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6344
6345 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6346 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6347 IEM_MC_REF_EFLAGS(pEFlags);
6348 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6349
6350 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6351 IEM_MC_ADVANCE_RIP();
6352 IEM_MC_END();
6353 return VINF_SUCCESS;
6354
6355 case IEMMODE_64BIT:
6356 IEM_MC_BEGIN(4, 0);
6357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6358 IEM_MC_ARG(uint64_t, u64Src, 1);
6359 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6360 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6361
6362 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6363 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6364 IEM_MC_REF_EFLAGS(pEFlags);
6365 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6366
6367 IEM_MC_ADVANCE_RIP();
6368 IEM_MC_END();
6369 return VINF_SUCCESS;
6370
6371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6372 }
6373 }
6374 else
6375 {
6376 switch (pVCpu->iem.s.enmEffOpSize)
6377 {
6378 case IEMMODE_16BIT:
6379 IEM_MC_BEGIN(4, 2);
6380 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6381 IEM_MC_ARG(uint16_t, u16Src, 1);
6382 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6383 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6385
6386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6387 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6388 IEM_MC_ASSIGN(cShiftArg, cShift);
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6391 IEM_MC_FETCH_EFLAGS(EFlags);
6392 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6393 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6394
6395 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6396 IEM_MC_COMMIT_EFLAGS(EFlags);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(4, 2);
6403 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6404 IEM_MC_ARG(uint32_t, u32Src, 1);
6405 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6406 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6408
6409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6410 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6411 IEM_MC_ASSIGN(cShiftArg, cShift);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6414 IEM_MC_FETCH_EFLAGS(EFlags);
6415 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6416 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6417
6418 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6419 IEM_MC_COMMIT_EFLAGS(EFlags);
6420 IEM_MC_ADVANCE_RIP();
6421 IEM_MC_END();
6422 return VINF_SUCCESS;
6423
6424 case IEMMODE_64BIT:
6425 IEM_MC_BEGIN(4, 2);
6426 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6427 IEM_MC_ARG(uint64_t, u64Src, 1);
6428 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6429 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6431
6432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6433 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6434 IEM_MC_ASSIGN(cShiftArg, cShift);
6435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6437 IEM_MC_FETCH_EFLAGS(EFlags);
6438 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6439 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6440
6441 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6442 IEM_MC_COMMIT_EFLAGS(EFlags);
6443 IEM_MC_ADVANCE_RIP();
6444 IEM_MC_END();
6445 return VINF_SUCCESS;
6446
6447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6448 }
6449 }
6450}
6451
6452
6453/**
6454 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
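 *
 * Same as the Ib forms except that the shift count comes from CL at
 * runtime rather than from an immediate byte in the instruction stream.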
6455 */
6456FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6457{
6458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6459 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6460
6461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6462 {
6463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6464
6465 switch (pVCpu->iem.s.enmEffOpSize)
6466 {
6467 case IEMMODE_16BIT:
6468 IEM_MC_BEGIN(4, 0);
6469 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6470 IEM_MC_ARG(uint16_t, u16Src, 1);
6471 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6472 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6473
6474 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6475 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6476 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6477 IEM_MC_REF_EFLAGS(pEFlags);
6478 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6479
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case IEMMODE_32BIT:
6485 IEM_MC_BEGIN(4, 0);
6486 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6487 IEM_MC_ARG(uint32_t, u32Src, 1);
6488 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6489 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6490
6491 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6492 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6493 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6494 IEM_MC_REF_EFLAGS(pEFlags);
6495 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6496
6497 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6498 IEM_MC_ADVANCE_RIP();
6499 IEM_MC_END();
6500 return VINF_SUCCESS;
6501
6502 case IEMMODE_64BIT:
6503 IEM_MC_BEGIN(4, 0);
6504 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6505 IEM_MC_ARG(uint64_t, u64Src, 1);
6506 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6507 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6508
6509 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6510 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6511 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6512 IEM_MC_REF_EFLAGS(pEFlags);
6513 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6514
6515 IEM_MC_ADVANCE_RIP();
6516 IEM_MC_END();
6517 return VINF_SUCCESS;
6518
6519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6520 }
6521 }
6522 else
6523 {
6524 switch (pVCpu->iem.s.enmEffOpSize)
6525 {
6526 case IEMMODE_16BIT:
6527 IEM_MC_BEGIN(4, 2);
6528 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6529 IEM_MC_ARG(uint16_t, u16Src, 1);
6530 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6531 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6533
6534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6536 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6537 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6538 IEM_MC_FETCH_EFLAGS(EFlags);
6539 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6540 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6541
6542 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6543 IEM_MC_COMMIT_EFLAGS(EFlags);
6544 IEM_MC_ADVANCE_RIP();
6545 IEM_MC_END();
6546 return VINF_SUCCESS;
6547
6548 case IEMMODE_32BIT:
6549 IEM_MC_BEGIN(4, 2);
6550 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6551 IEM_MC_ARG(uint32_t, u32Src, 1);
6552 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6553 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6555
6556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6559 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6560 IEM_MC_FETCH_EFLAGS(EFlags);
6561 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6562 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6563
6564 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6565 IEM_MC_COMMIT_EFLAGS(EFlags);
6566 IEM_MC_ADVANCE_RIP();
6567 IEM_MC_END();
6568 return VINF_SUCCESS;
6569
6570 case IEMMODE_64BIT:
6571 IEM_MC_BEGIN(4, 2);
6572 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6573 IEM_MC_ARG(uint64_t, u64Src, 1);
6574 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6575 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6577
6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6581 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6582 IEM_MC_FETCH_EFLAGS(EFlags);
6583 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6584 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6585
6586 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6587 IEM_MC_COMMIT_EFLAGS(EFlags);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6593 }
6594 }
6595}
6596
6597
6598
6599/** Opcode 0x0f 0xa4. */
6600FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6601{
6602 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6603 IEMOP_HLP_MIN_386();
6604 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6605}
6606
6607
6608/** Opcode 0x0f 0xa5. */
6609FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6610{
6611 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6612 IEMOP_HLP_MIN_386();
6613 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6614}
6615
6616
6617/** Opcode 0x0f 0xa8. */
6618FNIEMOP_DEF(iemOp_push_gs)
6619{
6620 IEMOP_MNEMONIC(push_gs, "push gs");
6621 IEMOP_HLP_MIN_386();
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6624}
6625
6626
6627/** Opcode 0x0f 0xa9. */
6628FNIEMOP_DEF(iemOp_pop_gs)
6629{
6630 IEMOP_MNEMONIC(pop_gs, "pop gs");
6631 IEMOP_HLP_MIN_386();
6632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6633 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6634}
6635
6636
6637/** Opcode 0x0f 0xaa. */
6638FNIEMOP_DEF(iemOp_rsm)
6639{
6640 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6641 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6644}
6645
6646
6647
6648/** Opcode 0x0f 0xab. */
6649FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6650{
6651 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6652 IEMOP_HLP_MIN_386();
6653 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6654}
6655
6656
6657/** Opcode 0x0f 0xac. */
6658FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6659{
6660 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6661 IEMOP_HLP_MIN_386();
6662 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6663}
6664
6665
6666/** Opcode 0x0f 0xad. */
6667FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6668{
6669 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6670 IEMOP_HLP_MIN_386();
6671 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6672}
6673
6674
6675/** Opcode 0x0f 0xae mem/0. */
6676FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6677{
6678 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6679 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6680 return IEMOP_RAISE_INVALID_OPCODE();
6681
6682 IEM_MC_BEGIN(3, 1);
6683 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6684 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6685 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6688 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6689 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6690 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6691 IEM_MC_END();
6692 return VINF_SUCCESS;
6693}
6694
6695
6696/** Opcode 0x0f 0xae mem/1. */
6697FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6698{
6699 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6700 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6701 return IEMOP_RAISE_INVALID_OPCODE();
6702
6703 IEM_MC_BEGIN(3, 1);
6704 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6705 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6706 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6709 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6710 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6711 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6712 IEM_MC_END();
6713 return VINF_SUCCESS;
6714}
6715
6716
6717/**
6718 * @opmaps grp15
6719 * @opcode !11/2
6720 * @oppfx none
6721 * @opcpuid sse
6722 * @opgroup og_sse_mxcsrsm
6723 * @opxcpttype 5
6724 * @optest op1=0 -> mxcsr=0
6725 * @optest op1=0x2083 -> mxcsr=0x2083
6726 * @optest op1=0xfffffffe -> value.xcpt=0xd
6727 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6728 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6729 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6730 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6731 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6732 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6733 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6734 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
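 *
 * (The @optest lines above drive the instruction test generator: each
 * gives an input state and the expected outcome, with value.xcpt=0x6,
 * 0x7 and 0xd standing for \#UD, \#NM and \#GP respectively.)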
6735 */
6736FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6737{
6738 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6739 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6740 return IEMOP_RAISE_INVALID_OPCODE();
6741
6742 IEM_MC_BEGIN(2, 0);
6743 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6744 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6747 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6748 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6749 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6750 IEM_MC_END();
6751 return VINF_SUCCESS;
6752}
6753
6754
6755/**
6756 * @opmaps grp15
6757 * @opcode !11/3
6758 * @oppfx none
6759 * @opcpuid sse
6760 * @opgroup og_sse_mxcsrsm
6761 * @opxcpttype 5
6762 * @optest mxcsr=0 -> op1=0
6763 * @optest mxcsr=0x2083 -> op1=0x2083
6764 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6765 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6766 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6767 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6768 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6769 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6770 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6771 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6772 */
6773FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6774{
6775 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6776 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6777 return IEMOP_RAISE_INVALID_OPCODE();
6778
6779 IEM_MC_BEGIN(2, 0);
6780 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6781 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6784 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6785 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6786 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6787 IEM_MC_END();
6788 return VINF_SUCCESS;
6789}
6790
6791
6792/**
6793 * @opmaps grp15
6794 * @opcode !11/4
6795 * @oppfx none
6796 * @opcpuid xsave
6797 * @opgroup og_system
6798 * @opxcpttype none
6799 */
6800FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6801{
6802 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6803 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6804 return IEMOP_RAISE_INVALID_OPCODE();
6805
6806 IEM_MC_BEGIN(3, 0);
6807 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6808 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6809 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6812 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6813 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6814 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6815 IEM_MC_END();
6816 return VINF_SUCCESS;
6817}
6818
6819
6820/**
6821 * @opmaps grp15
6822 * @opcode !11/5
6823 * @oppfx none
6824 * @opcpuid xsave
6825 * @opgroup og_system
6826 * @opxcpttype none
6827 */
6828FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6829{
6830 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6831 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6832 return IEMOP_RAISE_INVALID_OPCODE();
6833
6834 IEM_MC_BEGIN(3, 0);
6835 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6836 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6837 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6840 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, same as fxrstor above. */
6841 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6842 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6843 IEM_MC_END();
6844 return VINF_SUCCESS;
6845}
6846
6847/** Opcode 0x0f 0xae mem/6. */
6848FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6849
6850/**
6851 * @opmaps grp15
6852 * @opcode !11/7
6853 * @oppfx none
6854 * @opcpuid clfsh
6855 * @opgroup og_cachectl
6856 * @optest op1=1 ->
6857 */
6858FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6859{
6860 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6862 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6863
6864 IEM_MC_BEGIN(2, 0);
6865 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6866 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6869 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6870 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6871 IEM_MC_END();
6872 return VINF_SUCCESS;
6873}
6874
6875/**
6876 * @opmaps grp15
6877 * @opcode !11/7
6878 * @oppfx 0x66
6879 * @opcpuid clflushopt
6880 * @opgroup og_cachectl
6881 * @optest op1=1 ->
6882 */
6883FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6884{
6885 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6886 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6887 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6888
6889 IEM_MC_BEGIN(2, 0);
6890 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6891 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6894 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6895 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6896 IEM_MC_END();
6897 return VINF_SUCCESS;
6898}
6899
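/*
 * The three fence encodings below share a pattern: the instruction
 * raises #UD unless the *guest* CPU profile has SSE2, while the *host*
 * feature check merely selects the implementation - hosts without SSE2
 * fall back on iemAImpl_alt_mem_fence, presumably a locked memory
 * operation with equivalent ordering semantics.
 */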
6900
6901/** Opcode 0x0f 0xae 11b/5. */
6902FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6903{
6904 RT_NOREF_PV(bRm);
6905 IEMOP_MNEMONIC(lfence, "lfence");
6906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6907 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6908 return IEMOP_RAISE_INVALID_OPCODE();
6909
6910 IEM_MC_BEGIN(0, 0);
6911 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6912 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6913 else
6914 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6915 IEM_MC_ADVANCE_RIP();
6916 IEM_MC_END();
6917 return VINF_SUCCESS;
6918}
6919
6920
6921/** Opcode 0x0f 0xae 11b/6. */
6922FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6923{
6924 RT_NOREF_PV(bRm);
6925 IEMOP_MNEMONIC(mfence, "mfence");
6926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6927 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6928 return IEMOP_RAISE_INVALID_OPCODE();
6929
6930 IEM_MC_BEGIN(0, 0);
6931 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6932 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6933 else
6934 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6935 IEM_MC_ADVANCE_RIP();
6936 IEM_MC_END();
6937 return VINF_SUCCESS;
6938}
6939
6940
6941/** Opcode 0x0f 0xae 11b/7. */
6942FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6943{
6944 RT_NOREF_PV(bRm);
6945 IEMOP_MNEMONIC(sfence, "sfence");
6946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6948 return IEMOP_RAISE_INVALID_OPCODE();
6949
6950 IEM_MC_BEGIN(0, 0);
6951 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6952 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6953 else
6954 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6955 IEM_MC_ADVANCE_RIP();
6956 IEM_MC_END();
6957 return VINF_SUCCESS;
6958}
6959
6960
6961/** Opcode 0xf3 0x0f 0xae 11b/0. */
6962FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6963{
6964 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6966 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6967 {
6968 IEM_MC_BEGIN(1, 0);
6969 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6970 IEM_MC_ARG(uint64_t, u64Dst, 0);
6971 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6972 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6973 IEM_MC_ADVANCE_RIP();
6974 IEM_MC_END();
6975 }
6976 else
6977 {
6978 IEM_MC_BEGIN(1, 0);
6979 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6980 IEM_MC_ARG(uint32_t, u32Dst, 0);
6981 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6982 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6983 IEM_MC_ADVANCE_RIP();
6984 IEM_MC_END();
6985 }
6986 return VINF_SUCCESS;
6987}
6988
6989
6990/** Opcode 0xf3 0x0f 0xae 11b/1. */
6991FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6992{
6993 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6995 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6996 {
6997 IEM_MC_BEGIN(1, 0);
6998 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6999 IEM_MC_ARG(uint64_t, u64Dst, 0);
7000 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7001 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7002 IEM_MC_ADVANCE_RIP();
7003 IEM_MC_END();
7004 }
7005 else
7006 {
7007 IEM_MC_BEGIN(1, 0);
7008 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7009 IEM_MC_ARG(uint32_t, u32Dst, 0);
7010 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7011 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7012 IEM_MC_ADVANCE_RIP();
7013 IEM_MC_END();
7014 }
7015 return VINF_SUCCESS;
7016}
7017
7018
7019/** Opcode 0xf3 0x0f 0xae 11b/2. */
7020FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7021{
7022 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7025 {
7026 IEM_MC_BEGIN(1, 0);
7027 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7028 IEM_MC_ARG(uint64_t, u64Dst, 0);
7029 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7030 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7031 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7032 IEM_MC_ADVANCE_RIP();
7033 IEM_MC_END();
7034 }
7035 else
7036 {
7037 IEM_MC_BEGIN(1, 0);
7038 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7039 IEM_MC_ARG(uint32_t, u32Dst, 0);
7040 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7041 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7042 IEM_MC_ADVANCE_RIP();
7043 IEM_MC_END();
7044 }
7045 return VINF_SUCCESS;
7046}
7047
7048
7049/** Opcode 0xf3 0x0f 0xae 11b/3. */
7050FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7051{
7052 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7055 {
7056 IEM_MC_BEGIN(1, 0);
7057 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7058 IEM_MC_ARG(uint64_t, u64Dst, 0);
7059 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7060 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7061 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7062 IEM_MC_ADVANCE_RIP();
7063 IEM_MC_END();
7064 }
7065 else
7066 {
7067 IEM_MC_BEGIN(1, 0);
7068 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7069 IEM_MC_ARG(uint32_t, u32Dst, 0);
7070 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7071 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7072 IEM_MC_ADVANCE_RIP();
7073 IEM_MC_END();
7074 }
7075 return VINF_SUCCESS;
7076}
7077
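/*
 * Note on the four FSGSBASE ops above: only the 64-bit forms of wrfsbase
 * and wrgsbase can fault on the value - a non-canonical base raises
 * #GP(0) via the explicit check - whereas the 32-bit forms simply
 * zero-extend the GPR into the 64-bit segment base.
 */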
7078
7079/**
7080 * Group 15 jump table for register variant.
7081 */
7082IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7083{ /* pfx: none, 066h, 0f3h, 0f2h */
7084 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7085 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7086 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7087 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7088 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7089 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7090 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7091 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7092};
7093AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7094
7095
7096/**
7097 * Group 15 jump table for memory variant.
7098 */
7099IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7100{ /* pfx: none, 066h, 0f3h, 0f2h */
7101 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7102 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7103 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7104 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7105 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7106 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7107 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7108 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7109};
7110AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7111
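/*
 * Both tables are indexed by reg * 4 + prefix index (0 = none, 1 = 0x66,
 * 2 = 0xf3, 3 = 0xf2).  A worked example: modrm byte 0xe8 (mod=3, reg=5,
 * rm=0) with no prefix selects iemOp_Grp15_lfence, while the same byte
 * behind an 0xf3 prefix decodes as invalid.
 */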
7112
7113/** Opcode 0x0f 0xae. */
7114FNIEMOP_DEF(iemOp_Grp15)
7115{
7116 IEMOP_HLP_MIN_586(); /* Not entirely accurate, nor strictly needed, but useful for debugging 286 code. */
7117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7119 /* register, register */
7120 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7121 + pVCpu->iem.s.idxPrefix], bRm);
7122 /* memory, register */
7123 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7124 + pVCpu->iem.s.idxPrefix], bRm);
7125}
7126
7127
7128/** Opcode 0x0f 0xaf. */
7129FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7130{
7131 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7132 IEMOP_HLP_MIN_386();
7133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7134 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7135}
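
/*
 * Note: only CF and OF are architecturally defined for two-operand IMUL
 * (both set when the product overflows the destination); SF, ZF, AF and
 * PF are left undefined, which is what the verification mask above
 * declares.
 */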
7136
7137
7138/** Opcode 0x0f 0xb0. */
7139FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7140{
7141 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7142 IEMOP_HLP_MIN_486();
7143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7144
7145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7146 {
7147 IEMOP_HLP_DONE_DECODING();
7148 IEM_MC_BEGIN(4, 0);
7149 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7150 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7151 IEM_MC_ARG(uint8_t, u8Src, 2);
7152 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7153
7154 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7155 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7156 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7157 IEM_MC_REF_EFLAGS(pEFlags);
7158 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7159 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7160 else
7161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7162
7163 IEM_MC_ADVANCE_RIP();
7164 IEM_MC_END();
7165 }
7166 else
7167 {
7168 IEM_MC_BEGIN(4, 3);
7169 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7170 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7171 IEM_MC_ARG(uint8_t, u8Src, 2);
7172 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7174 IEM_MC_LOCAL(uint8_t, u8Al);
7175
7176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7177 IEMOP_HLP_DONE_DECODING();
7178 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7179 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7180 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7181 IEM_MC_FETCH_EFLAGS(EFlags);
7182 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7183 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7185 else
7186 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7187
7188 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7189 IEM_MC_COMMIT_EFLAGS(EFlags);
7190 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7191 IEM_MC_ADVANCE_RIP();
7192 IEM_MC_END();
7193 }
7194 return VINF_SUCCESS;
7195}
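
/*
 * Illustrative sketch (hypothetical, not part of the build): the compare and
 * exchange performed by the iemAImpl_cmpxchg_u8 worker called above, minus
 * the EFLAGS updates.  AL is both an input and an output, which is why the
 * memory form keeps a local copy and commits it back afterwards.
 */
#if 0
static void iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src)
{
    if (*pu8Al == *pu8Dst)
        *pu8Dst = u8Src;  /* equal: ZF=1, the source operand is stored */
    else
        *pu8Al = *pu8Dst; /* not equal: ZF=0, the destination is loaded into AL */
}
#endif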
7196
7197/** Opcode 0x0f 0xb1. */
7198FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7199{
7200 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7201 IEMOP_HLP_MIN_486();
7202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7203
7204 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7205 {
7206 IEMOP_HLP_DONE_DECODING();
7207 switch (pVCpu->iem.s.enmEffOpSize)
7208 {
7209 case IEMMODE_16BIT:
7210 IEM_MC_BEGIN(4, 0);
7211 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7212 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7213 IEM_MC_ARG(uint16_t, u16Src, 2);
7214 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7215
7216 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7217 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7218 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7219 IEM_MC_REF_EFLAGS(pEFlags);
7220 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7222 else
7223 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7224
7225 IEM_MC_ADVANCE_RIP();
7226 IEM_MC_END();
7227 return VINF_SUCCESS;
7228
7229 case IEMMODE_32BIT:
7230 IEM_MC_BEGIN(4, 0);
7231 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7232 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7233 IEM_MC_ARG(uint32_t, u32Src, 2);
7234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7235
7236 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7237 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7238 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7239 IEM_MC_REF_EFLAGS(pEFlags);
7240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7241 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7242 else
7243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7244
7245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7247 IEM_MC_ADVANCE_RIP();
7248 IEM_MC_END();
7249 return VINF_SUCCESS;
7250
7251 case IEMMODE_64BIT:
7252 IEM_MC_BEGIN(4, 0);
7253 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7254 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
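 /* On 32-bit (RT_ARCH_X86) hosts the 64-bit source operand cannot conveniently
 be passed by value to the assembly worker, so it is passed by reference. */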
7255#ifdef RT_ARCH_X86
7256 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7257#else
7258 IEM_MC_ARG(uint64_t, u64Src, 2);
7259#endif
7260 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7261
7262 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7263 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7264 IEM_MC_REF_EFLAGS(pEFlags);
7265#ifdef RT_ARCH_X86
7266 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7267 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7269 else
7270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7271#else
7272 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7275 else
7276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7277#endif
7278
7279 IEM_MC_ADVANCE_RIP();
7280 IEM_MC_END();
7281 return VINF_SUCCESS;
7282
7283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7284 }
7285 }
7286 else
7287 {
7288 switch (pVCpu->iem.s.enmEffOpSize)
7289 {
7290 case IEMMODE_16BIT:
7291 IEM_MC_BEGIN(4, 3);
7292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7293 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7294 IEM_MC_ARG(uint16_t, u16Src, 2);
7295 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7297 IEM_MC_LOCAL(uint16_t, u16Ax);
7298
7299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7300 IEMOP_HLP_DONE_DECODING();
7301 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7302 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7303 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7304 IEM_MC_FETCH_EFLAGS(EFlags);
7305 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7306 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7307 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7308 else
7309 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7310
7311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7312 IEM_MC_COMMIT_EFLAGS(EFlags);
7313 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7314 IEM_MC_ADVANCE_RIP();
7315 IEM_MC_END();
7316 return VINF_SUCCESS;
7317
7318 case IEMMODE_32BIT:
7319 IEM_MC_BEGIN(4, 3);
7320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7321 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7322 IEM_MC_ARG(uint32_t, u32Src, 2);
7323 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7325 IEM_MC_LOCAL(uint32_t, u32Eax);
7326
7327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7328 IEMOP_HLP_DONE_DECODING();
7329 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7330 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7331 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7332 IEM_MC_FETCH_EFLAGS(EFlags);
7333 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7334 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7335 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7336 else
7337 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7338
7339 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7340 IEM_MC_COMMIT_EFLAGS(EFlags);
7341 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7342 IEM_MC_ADVANCE_RIP();
7343 IEM_MC_END();
7344 return VINF_SUCCESS;
7345
7346 case IEMMODE_64BIT:
7347 IEM_MC_BEGIN(4, 3);
7348 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7349 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7350#ifdef RT_ARCH_X86
7351 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7352#else
7353 IEM_MC_ARG(uint64_t, u64Src, 2);
7354#endif
7355 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7357 IEM_MC_LOCAL(uint64_t, u64Rax);
7358
7359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7360 IEMOP_HLP_DONE_DECODING();
7361 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7362 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7363 IEM_MC_FETCH_EFLAGS(EFlags);
7364 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7365#ifdef RT_ARCH_X86
7366 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7367 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7368 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7369 else
7370 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7371#else
7372 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7373 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7374 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7375 else
7376 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7377#endif
7378
7379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7380 IEM_MC_COMMIT_EFLAGS(EFlags);
7381 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7382 IEM_MC_ADVANCE_RIP();
7383 IEM_MC_END();
7384 return VINF_SUCCESS;
7385
7386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7387 }
7388 }
7389}
7390
7391
7392FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7393{
7394 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7395 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7396
7397 switch (pVCpu->iem.s.enmEffOpSize)
7398 {
7399 case IEMMODE_16BIT:
7400 IEM_MC_BEGIN(5, 1);
7401 IEM_MC_ARG(uint16_t, uSel, 0);
7402 IEM_MC_ARG(uint16_t, offSeg, 1);
7403 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7404 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7405 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7406 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7409 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7410 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7411 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7412 IEM_MC_END();
7413 return VINF_SUCCESS;
7414
7415 case IEMMODE_32BIT:
7416 IEM_MC_BEGIN(5, 1);
7417 IEM_MC_ARG(uint16_t, uSel, 0);
7418 IEM_MC_ARG(uint32_t, offSeg, 1);
7419 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7420 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7421 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7422 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7425 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7426 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7427 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7428 IEM_MC_END();
7429 return VINF_SUCCESS;
7430
7431 case IEMMODE_64BIT:
7432 IEM_MC_BEGIN(5, 1);
7433 IEM_MC_ARG(uint16_t, uSel, 0);
7434 IEM_MC_ARG(uint64_t, offSeg, 1);
7435 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7436 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7437 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7438 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7441 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7442 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7443 else
7444 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7445 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7446 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7447 IEM_MC_END();
7448 return VINF_SUCCESS;
7449
7450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7451 }
7452}
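
/*
 * Illustrative sketch (hypothetical, not part of the build): the far pointer
 * operand layout consumed by the helper above, shown for the 32-bit operand
 * size.  The offset comes first at the effective address; the 16-bit selector
 * follows at displacement 2, 4 or 8 depending on the operand size.
 */
#if 0
struct IemSketchFarPtr32
{
    uint32_t offSeg; /* bytes 0..3: offset, loaded into the general register */
    uint16_t uSel;   /* bytes 4..5: selector, loaded into the segment register */
};
#endif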
7453
7454
7455/** Opcode 0x0f 0xb2. */
7456FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7457{
7458 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7459 IEMOP_HLP_MIN_386();
7460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7462 return IEMOP_RAISE_INVALID_OPCODE();
7463 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7464}
7465
7466
7467/** Opcode 0x0f 0xb3. */
7468FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7469{
7470 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7471 IEMOP_HLP_MIN_386();
7472 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7473}
7474
7475
7476/** Opcode 0x0f 0xb4. */
7477FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7478{
7479 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7480 IEMOP_HLP_MIN_386();
7481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7483 return IEMOP_RAISE_INVALID_OPCODE();
7484 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7485}
7486
7487
7488/** Opcode 0x0f 0xb5. */
7489FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7490{
7491 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7492 IEMOP_HLP_MIN_386();
7493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7495 return IEMOP_RAISE_INVALID_OPCODE();
7496 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7497}
7498
7499
7500/** Opcode 0x0f 0xb6. */
7501FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7502{
7503 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7504 IEMOP_HLP_MIN_386();
7505
7506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7507
7508 /*
7509 * If rm is denoting a register, no more instruction bytes.
7510 */
7511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7512 {
7513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7514 switch (pVCpu->iem.s.enmEffOpSize)
7515 {
7516 case IEMMODE_16BIT:
7517 IEM_MC_BEGIN(0, 1);
7518 IEM_MC_LOCAL(uint16_t, u16Value);
7519 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7520 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7521 IEM_MC_ADVANCE_RIP();
7522 IEM_MC_END();
7523 return VINF_SUCCESS;
7524
7525 case IEMMODE_32BIT:
7526 IEM_MC_BEGIN(0, 1);
7527 IEM_MC_LOCAL(uint32_t, u32Value);
7528 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7529 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7530 IEM_MC_ADVANCE_RIP();
7531 IEM_MC_END();
7532 return VINF_SUCCESS;
7533
7534 case IEMMODE_64BIT:
7535 IEM_MC_BEGIN(0, 1);
7536 IEM_MC_LOCAL(uint64_t, u64Value);
7537 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7538 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7539 IEM_MC_ADVANCE_RIP();
7540 IEM_MC_END();
7541 return VINF_SUCCESS;
7542
7543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7544 }
7545 }
7546 else
7547 {
7548 /*
7549 * We're loading a register from memory.
7550 */
7551 switch (pVCpu->iem.s.enmEffOpSize)
7552 {
7553 case IEMMODE_16BIT:
7554 IEM_MC_BEGIN(0, 2);
7555 IEM_MC_LOCAL(uint16_t, u16Value);
7556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7559 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7560 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7561 IEM_MC_ADVANCE_RIP();
7562 IEM_MC_END();
7563 return VINF_SUCCESS;
7564
7565 case IEMMODE_32BIT:
7566 IEM_MC_BEGIN(0, 2);
7567 IEM_MC_LOCAL(uint32_t, u32Value);
7568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7571 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7572 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7573 IEM_MC_ADVANCE_RIP();
7574 IEM_MC_END();
7575 return VINF_SUCCESS;
7576
7577 case IEMMODE_64BIT:
7578 IEM_MC_BEGIN(0, 2);
7579 IEM_MC_LOCAL(uint64_t, u64Value);
7580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7583 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7584 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7585 IEM_MC_ADVANCE_RIP();
7586 IEM_MC_END();
7587 return VINF_SUCCESS;
7588
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591 }
7592}
7593
7594
7595/** Opcode 0x0f 0xb7. */
7596FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7597{
7598 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7599 IEMOP_HLP_MIN_386();
7600
7601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7602
7603 /** @todo Not entirely sure how the operand size prefix is handled here,
7604 * assuming that it will be ignored. Would be nice to have a few
7605 * tests for this. */
7606 /*
7607 * If rm is denoting a register, no more instruction bytes.
7608 */
7609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7610 {
7611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7612 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7613 {
7614 IEM_MC_BEGIN(0, 1);
7615 IEM_MC_LOCAL(uint32_t, u32Value);
7616 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7617 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7618 IEM_MC_ADVANCE_RIP();
7619 IEM_MC_END();
7620 }
7621 else
7622 {
7623 IEM_MC_BEGIN(0, 1);
7624 IEM_MC_LOCAL(uint64_t, u64Value);
7625 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7626 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7627 IEM_MC_ADVANCE_RIP();
7628 IEM_MC_END();
7629 }
7630 }
7631 else
7632 {
7633 /*
7634 * We're loading a register from memory.
7635 */
7636 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7637 {
7638 IEM_MC_BEGIN(0, 2);
7639 IEM_MC_LOCAL(uint32_t, u32Value);
7640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7644 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7645 IEM_MC_ADVANCE_RIP();
7646 IEM_MC_END();
7647 }
7648 else
7649 {
7650 IEM_MC_BEGIN(0, 2);
7651 IEM_MC_LOCAL(uint64_t, u64Value);
7652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7655 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7656 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7657 IEM_MC_ADVANCE_RIP();
7658 IEM_MC_END();
7659 }
7660 }
7661 return VINF_SUCCESS;
7662}
7663
7664
7665/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7666FNIEMOP_UD_STUB(iemOp_jmpe);
7667/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7668FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7669
7670
7671/**
7672 * @opcode 0xb9
7673 * @opinvalid intel-modrm
7674 * @optest ->
7675 */
7676FNIEMOP_DEF(iemOp_Grp10)
7677{
7678 /*
7679 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7680 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7681 */
7682 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7683 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7684 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7685}
7686
7687
7688/** Opcode 0x0f 0xba. */
7689FNIEMOP_DEF(iemOp_Grp8)
7690{
7691 IEMOP_HLP_MIN_386();
7692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7693 PCIEMOPBINSIZES pImpl;
7694 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7695 {
7696 case 0: case 1: case 2: case 3:
7697 /* Both AMD and Intel want full modr/m decoding and imm8. */
7698 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7699 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7700 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7701 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7702 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7704 }
7705 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7706
7707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7708 {
7709 /* register destination. */
7710 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7712
7713 switch (pVCpu->iem.s.enmEffOpSize)
7714 {
7715 case IEMMODE_16BIT:
7716 IEM_MC_BEGIN(3, 0);
7717 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7718 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7719 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7720
7721 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7722 IEM_MC_REF_EFLAGS(pEFlags);
7723 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7724
7725 IEM_MC_ADVANCE_RIP();
7726 IEM_MC_END();
7727 return VINF_SUCCESS;
7728
7729 case IEMMODE_32BIT:
7730 IEM_MC_BEGIN(3, 0);
7731 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7732 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7733 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7734
7735 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7736 IEM_MC_REF_EFLAGS(pEFlags);
7737 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7738
7739 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7740 IEM_MC_ADVANCE_RIP();
7741 IEM_MC_END();
7742 return VINF_SUCCESS;
7743
7744 case IEMMODE_64BIT:
7745 IEM_MC_BEGIN(3, 0);
7746 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7747 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7748 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7749
7750 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7751 IEM_MC_REF_EFLAGS(pEFlags);
7752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7753
7754 IEM_MC_ADVANCE_RIP();
7755 IEM_MC_END();
7756 return VINF_SUCCESS;
7757
7758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7759 }
7760 }
7761 else
7762 {
7763 /* memory destination. */
7764
7765 uint32_t fAccess;
7766 if (pImpl->pfnLockedU16)
7767 fAccess = IEM_ACCESS_DATA_RW;
7768 else /* BT */
7769 fAccess = IEM_ACCESS_DATA_R;
7770
7771 /** @todo test negative bit offsets! */
7772 switch (pVCpu->iem.s.enmEffOpSize)
7773 {
7774 case IEMMODE_16BIT:
7775 IEM_MC_BEGIN(3, 1);
7776 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7777 IEM_MC_ARG(uint16_t, u16Src, 1);
7778 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7780
7781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7782 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7783 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7784 if (pImpl->pfnLockedU16)
7785 IEMOP_HLP_DONE_DECODING();
7786 else
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 IEM_MC_FETCH_EFLAGS(EFlags);
7789 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7790 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7791 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7792 else
7793 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7794 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7795
7796 IEM_MC_COMMIT_EFLAGS(EFlags);
7797 IEM_MC_ADVANCE_RIP();
7798 IEM_MC_END();
7799 return VINF_SUCCESS;
7800
7801 case IEMMODE_32BIT:
7802 IEM_MC_BEGIN(3, 1);
7803 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7804 IEM_MC_ARG(uint32_t, u32Src, 1);
7805 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7807
7808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7809 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7810 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7811 if (pImpl->pfnLockedU16)
7812 IEMOP_HLP_DONE_DECODING();
7813 else
7814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7815 IEM_MC_FETCH_EFLAGS(EFlags);
7816 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7818 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7819 else
7820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7821 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7822
7823 IEM_MC_COMMIT_EFLAGS(EFlags);
7824 IEM_MC_ADVANCE_RIP();
7825 IEM_MC_END();
7826 return VINF_SUCCESS;
7827
7828 case IEMMODE_64BIT:
7829 IEM_MC_BEGIN(3, 1);
7830 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7831 IEM_MC_ARG(uint64_t, u64Src, 1);
7832 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7834
7835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7836 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7837 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7838 if (pImpl->pfnLockedU16)
7839 IEMOP_HLP_DONE_DECODING();
7840 else
7841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842 IEM_MC_FETCH_EFLAGS(EFlags);
7843 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7844 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7845 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7846 else
7847 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7848 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7849
7850 IEM_MC_COMMIT_EFLAGS(EFlags);
7851 IEM_MC_ADVANCE_RIP();
7852 IEM_MC_END();
7853 return VINF_SUCCESS;
7854
7855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7856 }
7857 }
7858}
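
/*
 * Illustrative sketch (hypothetical, not part of the build): how the imm8 bit
 * offset of the bt/bts/btr/btc Ev,Ib forms above is reduced.  Unlike the
 * Ev,Gv forms, the immediate form cannot address bits outside the destination
 * operand; the offset is simply masked to the operand width.
 */
#if 0
static uint8_t iemSketchBtImm8Offset(uint8_t u8Bit, unsigned cOpBits)
{
    return u8Bit & (uint8_t)(cOpBits - 1); /* 0x0f, 0x1f or 0x3f for 16, 32 and 64 bits */
}
#endif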
7859
7860
7861/** Opcode 0x0f 0xbb. */
7862FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7863{
7864 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7865 IEMOP_HLP_MIN_386();
7866 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7867}
7868
7869
7870/** Opcode 0x0f 0xbc. */
7871FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7872{
7873 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7874 IEMOP_HLP_MIN_386();
7875 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7876 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7877}
7878
7879
7880/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7881FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7882
7883
7884/** Opcode 0x0f 0xbd. */
7885FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7886{
7887 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7888 IEMOP_HLP_MIN_386();
7889 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7890 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7891}
7892
7893
7894/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7895FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7896
7897
7898/** Opcode 0x0f 0xbe. */
7899FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7900{
7901 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7902 IEMOP_HLP_MIN_386();
7903
7904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7905
7906 /*
7907 * If rm is denoting a register, no more instruction bytes.
7908 */
7909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7910 {
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 switch (pVCpu->iem.s.enmEffOpSize)
7913 {
7914 case IEMMODE_16BIT:
7915 IEM_MC_BEGIN(0, 1);
7916 IEM_MC_LOCAL(uint16_t, u16Value);
7917 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7918 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7919 IEM_MC_ADVANCE_RIP();
7920 IEM_MC_END();
7921 return VINF_SUCCESS;
7922
7923 case IEMMODE_32BIT:
7924 IEM_MC_BEGIN(0, 1);
7925 IEM_MC_LOCAL(uint32_t, u32Value);
7926 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7927 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7928 IEM_MC_ADVANCE_RIP();
7929 IEM_MC_END();
7930 return VINF_SUCCESS;
7931
7932 case IEMMODE_64BIT:
7933 IEM_MC_BEGIN(0, 1);
7934 IEM_MC_LOCAL(uint64_t, u64Value);
7935 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7936 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7937 IEM_MC_ADVANCE_RIP();
7938 IEM_MC_END();
7939 return VINF_SUCCESS;
7940
7941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7942 }
7943 }
7944 else
7945 {
7946 /*
7947 * We're loading a register from memory.
7948 */
7949 switch (pVCpu->iem.s.enmEffOpSize)
7950 {
7951 case IEMMODE_16BIT:
7952 IEM_MC_BEGIN(0, 2);
7953 IEM_MC_LOCAL(uint16_t, u16Value);
7954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7958 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 return VINF_SUCCESS;
7962
7963 case IEMMODE_32BIT:
7964 IEM_MC_BEGIN(0, 2);
7965 IEM_MC_LOCAL(uint32_t, u32Value);
7966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7970 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7971 IEM_MC_ADVANCE_RIP();
7972 IEM_MC_END();
7973 return VINF_SUCCESS;
7974
7975 case IEMMODE_64BIT:
7976 IEM_MC_BEGIN(0, 2);
7977 IEM_MC_LOCAL(uint64_t, u64Value);
7978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7981 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7982 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7983 IEM_MC_ADVANCE_RIP();
7984 IEM_MC_END();
7985 return VINF_SUCCESS;
7986
7987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7988 }
7989 }
7990}
7991
7992
7993/** Opcode 0x0f 0xbf. */
7994FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7995{
7996 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7997 IEMOP_HLP_MIN_386();
7998
7999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8000
8001 /** @todo Not entirely sure how the operand size prefix is handled here,
8002 * assuming that it will be ignored. Would be nice to have a few
8003 * tests for this. */
8004 /*
8005 * If rm is denoting a register, no more instruction bytes.
8006 */
8007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8008 {
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8011 {
8012 IEM_MC_BEGIN(0, 1);
8013 IEM_MC_LOCAL(uint32_t, u32Value);
8014 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8015 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8016 IEM_MC_ADVANCE_RIP();
8017 IEM_MC_END();
8018 }
8019 else
8020 {
8021 IEM_MC_BEGIN(0, 1);
8022 IEM_MC_LOCAL(uint64_t, u64Value);
8023 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8024 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8025 IEM_MC_ADVANCE_RIP();
8026 IEM_MC_END();
8027 }
8028 }
8029 else
8030 {
8031 /*
8032 * We're loading a register from memory.
8033 */
8034 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8035 {
8036 IEM_MC_BEGIN(0, 2);
8037 IEM_MC_LOCAL(uint32_t, u32Value);
8038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8041 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8042 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8043 IEM_MC_ADVANCE_RIP();
8044 IEM_MC_END();
8045 }
8046 else
8047 {
8048 IEM_MC_BEGIN(0, 2);
8049 IEM_MC_LOCAL(uint64_t, u64Value);
8050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8053 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8054 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8055 IEM_MC_ADVANCE_RIP();
8056 IEM_MC_END();
8057 }
8058 }
8059 return VINF_SUCCESS;
8060}
8061
8062
8063/** Opcode 0x0f 0xc0. */
8064FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8065{
8066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8067 IEMOP_HLP_MIN_486();
8068 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8069
8070 /*
8071 * If rm is denoting a register, no more instruction bytes.
8072 */
8073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8074 {
8075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8076
8077 IEM_MC_BEGIN(3, 0);
8078 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8079 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8080 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8081
8082 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8083 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8084 IEM_MC_REF_EFLAGS(pEFlags);
8085 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8086
8087 IEM_MC_ADVANCE_RIP();
8088 IEM_MC_END();
8089 }
8090 else
8091 {
8092 /*
8093 * We're accessing memory.
8094 */
8095 IEM_MC_BEGIN(3, 3);
8096 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8097 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8098 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8099 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8101
8102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8103 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8104 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8105 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8106 IEM_MC_FETCH_EFLAGS(EFlags);
8107 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8109 else
8110 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8111
8112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8113 IEM_MC_COMMIT_EFLAGS(EFlags);
8114 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8115 IEM_MC_ADVANCE_RIP();
8116 IEM_MC_END();
8117 return VINF_SUCCESS;
8118 }
8119 return VINF_SUCCESS;
8120}
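
/*
 * Illustrative sketch (hypothetical, not part of the build): the exchange and
 * add performed by the iemAImpl_xadd_* workers called above, minus the EFLAGS
 * updates.  The register operand receives the old destination value, which is
 * why the memory form keeps a local copy and stores it back afterwards.
 */
#if 0
static void iemSketchXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8Old = *pu8Dst;
    *pu8Dst = u8Old + *pu8Reg; /* destination += source */
    *pu8Reg = u8Old;           /* source register = old destination value */
}
#endif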
8121
8122
8123/** Opcode 0x0f 0xc1. */
8124FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8125{
8126 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8127 IEMOP_HLP_MIN_486();
8128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8129
8130 /*
8131 * If rm is denoting a register, no more instruction bytes.
8132 */
8133 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8134 {
8135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8136
8137 switch (pVCpu->iem.s.enmEffOpSize)
8138 {
8139 case IEMMODE_16BIT:
8140 IEM_MC_BEGIN(3, 0);
8141 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8142 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8144
8145 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8146 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8147 IEM_MC_REF_EFLAGS(pEFlags);
8148 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8149
8150 IEM_MC_ADVANCE_RIP();
8151 IEM_MC_END();
8152 return VINF_SUCCESS;
8153
8154 case IEMMODE_32BIT:
8155 IEM_MC_BEGIN(3, 0);
8156 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8157 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8159
8160 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8161 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8162 IEM_MC_REF_EFLAGS(pEFlags);
8163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8164
8165 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8166 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8167 IEM_MC_ADVANCE_RIP();
8168 IEM_MC_END();
8169 return VINF_SUCCESS;
8170
8171 case IEMMODE_64BIT:
8172 IEM_MC_BEGIN(3, 0);
8173 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8174 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8175 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8176
8177 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8178 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8179 IEM_MC_REF_EFLAGS(pEFlags);
8180 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8181
8182 IEM_MC_ADVANCE_RIP();
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185
8186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8187 }
8188 }
8189 else
8190 {
8191 /*
8192 * We're accessing memory.
8193 */
8194 switch (pVCpu->iem.s.enmEffOpSize)
8195 {
8196 case IEMMODE_16BIT:
8197 IEM_MC_BEGIN(3, 3);
8198 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8199 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8200 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8201 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8203
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8205 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8206 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8207 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8208 IEM_MC_FETCH_EFLAGS(EFlags);
8209 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8211 else
8212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8213
8214 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8215 IEM_MC_COMMIT_EFLAGS(EFlags);
8216 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8217 IEM_MC_ADVANCE_RIP();
8218 IEM_MC_END();
8219 return VINF_SUCCESS;
8220
8221 case IEMMODE_32BIT:
8222 IEM_MC_BEGIN(3, 3);
8223 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8224 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8225 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8226 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8228
8229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8230 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8231 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8232 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8233 IEM_MC_FETCH_EFLAGS(EFlags);
8234 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8236 else
8237 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8238
8239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8240 IEM_MC_COMMIT_EFLAGS(EFlags);
8241 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8242 IEM_MC_ADVANCE_RIP();
8243 IEM_MC_END();
8244 return VINF_SUCCESS;
8245
8246 case IEMMODE_64BIT:
8247 IEM_MC_BEGIN(3, 3);
8248 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8249 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8250 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8251 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8253
8254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8255 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8256 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8257 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8258 IEM_MC_FETCH_EFLAGS(EFlags);
8259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8260 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8261 else
8262 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8263
8264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8265 IEM_MC_COMMIT_EFLAGS(EFlags);
8266 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8267 IEM_MC_ADVANCE_RIP();
8268 IEM_MC_END();
8269 return VINF_SUCCESS;
8270
8271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8272 }
8273 }
8274}
8275
8276
8277/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8278FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8279/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8280FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8281/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8282FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8283/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8284FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8285
8286
8287/** Opcode 0x0f 0xc3. */
8288FNIEMOP_DEF(iemOp_movnti_My_Gy)
8289{
8290 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8291
8292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8293
8294 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8295 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8296 {
8297 switch (pVCpu->iem.s.enmEffOpSize)
8298 {
8299 case IEMMODE_32BIT:
8300 IEM_MC_BEGIN(0, 2);
8301 IEM_MC_LOCAL(uint32_t, u32Value);
8302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8303
8304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8307 return IEMOP_RAISE_INVALID_OPCODE();
8308
8309 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8310 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8311 IEM_MC_ADVANCE_RIP();
8312 IEM_MC_END();
8313 break;
8314
8315 case IEMMODE_64BIT:
8316 IEM_MC_BEGIN(0, 2);
8317 IEM_MC_LOCAL(uint64_t, u64Value);
8318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8319
8320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8323 return IEMOP_RAISE_INVALID_OPCODE();
8324
8325 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8326 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8327 IEM_MC_ADVANCE_RIP();
8328 IEM_MC_END();
8329 break;
8330
8331 case IEMMODE_16BIT:
8332 /** @todo check this form. */
8333 return IEMOP_RAISE_INVALID_OPCODE();
8334 }
8335 }
8336 else
8337 return IEMOP_RAISE_INVALID_OPCODE();
8338 return VINF_SUCCESS;
8339}
8340/* Opcode 0x66 0x0f 0xc3 - invalid */
8341/* Opcode 0xf3 0x0f 0xc3 - invalid */
8342/* Opcode 0xf2 0x0f 0xc3 - invalid */
8343
8344/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8345FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8346/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8347FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8348/* Opcode 0xf3 0x0f 0xc4 - invalid */
8349/* Opcode 0xf2 0x0f 0xc4 - invalid */
8350
8351/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8352FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8353/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8354FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8355/* Opcode 0xf3 0x0f 0xc5 - invalid */
8356/* Opcode 0xf2 0x0f 0xc5 - invalid */
8357
8358/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8359FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8360/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8361FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8362/* Opcode 0xf3 0x0f 0xc6 - invalid */
8363/* Opcode 0xf2 0x0f 0xc6 - invalid */
8364
8365
8366/** Opcode 0x0f 0xc7 !11/1. */
8367FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8368{
8369 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8370
8371 IEM_MC_BEGIN(4, 3);
8372 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8373 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8374 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8375 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8376 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8377 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8379
8380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8381 IEMOP_HLP_DONE_DECODING();
8382 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8383
8384 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8385 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8386 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8387
8388 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8389 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8390 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8391
8392 IEM_MC_FETCH_EFLAGS(EFlags);
8393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8395 else
8396 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8397
8398 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8399 IEM_MC_COMMIT_EFLAGS(EFlags);
8400 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8401 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8402 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8403 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8404 IEM_MC_ENDIF();
8405 IEM_MC_ADVANCE_RIP();
8406
8407 IEM_MC_END();
8408 return VINF_SUCCESS;
8409}
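
/*
 * Illustrative sketch (hypothetical, not part of the build): the comparison
 * performed by the iemAImpl_cmpxchg8b worker called above.  On a match the
 * ECX:EBX pair is stored and ZF is set; on a mismatch the memory value is
 * loaded into EDX:EAX and ZF is cleared, which is what the
 * IEM_MC_IF_EFL_BIT_NOT_SET block above commits back to the registers.
 */
#if 0
static int iemSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx; /* equal: store ECX:EBX, ZF=1 */
        return 1;
    }
    *pu64EaxEdx = *pu64Mem;   /* not equal: load memory into EDX:EAX, ZF=0 */
    return 0;
}
#endif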
8410
8411
8412/** Opcode REX.W 0x0f 0xc7 !11/1. */
8413FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8414{
8415 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8416 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8417 {
8418#if 0
8419 RT_NOREF(bRm);
8420 IEMOP_BITCH_ABOUT_STUB();
8421 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8422#else
8423 IEM_MC_BEGIN(4, 3);
8424 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8425 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8426 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8427 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8428 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8429 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431
8432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8433 IEMOP_HLP_DONE_DECODING();
8434 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8435 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8436
8437 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8438 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8439 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8440
8441 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8442 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8443 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8444
8445 IEM_MC_FETCH_EFLAGS(EFlags);
8446# ifdef RT_ARCH_AMD64
8447 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8448 {
8449 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8450 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8451 else
8452 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8453 }
8454 else
8455# endif
8456 {
8457 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
8458 accesses that are not at all atomic, which works fine in a uni-CPU guest
8459 configuration (ignoring DMA). If guest SMP is active we have no choice
8460 but to use a rendezvous callback here. Sigh. */
8461 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8463 else
8464 {
8465 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8466 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8467 }
8468 }
8469
8470 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8471 IEM_MC_COMMIT_EFLAGS(EFlags);
8472 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8473 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8474 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8475 IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP();
8477
8478 IEM_MC_END();
8479 return VINF_SUCCESS;
8480#endif
8481 }
8482 Log(("cmpxchg16b -> #UD\n"));
8483 return IEMOP_RAISE_INVALID_OPCODE();
8484}
8485
8486FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8487{
8488 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8489 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8490 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8491}
8492
8493/** Opcode 0x0f 0xc7 11/6. */
8494FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8495
8496/** Opcode 0x0f 0xc7 !11/6. */
8497#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8498FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8499{
8500 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8501 IEMOP_HLP_IN_VMX_OPERATION();
8502 IEMOP_HLP_VMX_INSTR();
8503 IEM_MC_BEGIN(1, 0);
8504 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 0);
8505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8506 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8507 IEM_MC_CALL_CIMPL_1(iemCImpl_vmptrld, GCPtrEffSrc);
8508 IEM_MC_END();
8509 return VINF_SUCCESS;
8510}
8511#else
8512FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8513#endif
8514
8515/** Opcode 0x66 0x0f 0xc7 !11/6. */
8516#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8517FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8518{
8519 IEMOP_MNEMONIC(vmclear, "vmclear");
8520 IEMOP_HLP_IN_VMX_OPERATION();
8521 IEMOP_HLP_VMX_INSTR();
8522 IEM_MC_BEGIN(1, 0);
8523 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
8524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8525 IEMOP_HLP_DONE_DECODING();
8526 IEM_MC_CALL_CIMPL_1(iemCImpl_vmclear, GCPtrEffDst);
8527 IEM_MC_END();
8528 return VINF_SUCCESS;
8529}
8530#else
8531FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8532#endif
8533
8534/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8535#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8536FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8537{
8538 IEMOP_MNEMONIC(vmxon, "vmxon");
8539 IEMOP_HLP_VMX_INSTR();
8540 IEM_MC_BEGIN(1, 0);
8541 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 0);
8542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8543 IEMOP_HLP_DONE_DECODING();
8544 IEM_MC_CALL_CIMPL_1(iemCImpl_vmxon, GCPtrEffSrc);
8545 IEM_MC_END();
8546 return VINF_SUCCESS;
8547}
8548#else
8549FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8550#endif
8551
8552/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8553#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8554FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8555{
8556 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8557 IEMOP_HLP_IN_VMX_OPERATION();
8558 IEMOP_HLP_VMX_INSTR();
8559 IEM_MC_BEGIN(1, 0);
8560 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
8561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8562 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8563 IEM_MC_CALL_CIMPL_1(iemCImpl_vmptrst, GCPtrEffDst);
8564 IEM_MC_END();
8565 return VINF_SUCCESS;
8566}
8567#else
8568FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8569#endif
8570
8571/** Opcode 0x0f 0xc7 11/7. */
8572FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8573
8574
8575/**
8576 * Group 9 jump table for register variant.
8577 */
8578IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8579{ /* pfx: none, 066h, 0f3h, 0f2h */
8580 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8581 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8582 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8583 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8584 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8585 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8586 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8587 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8588};
8589AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8590
8591
8592/**
8593 * Group 9 jump table for memory variant.
8594 */
8595IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8596{ /* pfx: none, 066h, 0f3h, 0f2h */
8597 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8598 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8599 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8600 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8601 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8602 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8603 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8604 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8605};
8606AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8607
8608
8609/** Opcode 0x0f 0xc7. */
8610FNIEMOP_DEF(iemOp_Grp9)
8611{
8612 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8613 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8614 /* register, register */
8615 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8616 + pVCpu->iem.s.idxPrefix], bRm);
8617 /* memory, register */
8618 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8619 + pVCpu->iem.s.idxPrefix], bRm);
8620}
8621
8622
8623/**
8624 * Common 'bswap register' helper.
8625 */
8626FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8627{
8628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8629 switch (pVCpu->iem.s.enmEffOpSize)
8630 {
8631 case IEMMODE_16BIT:
8632 IEM_MC_BEGIN(1, 0);
8633 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8634 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8635 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8636 IEM_MC_ADVANCE_RIP();
8637 IEM_MC_END();
8638 return VINF_SUCCESS;
8639
8640 case IEMMODE_32BIT:
8641 IEM_MC_BEGIN(1, 0);
8642 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8643 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8644 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8645 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8646 IEM_MC_ADVANCE_RIP();
8647 IEM_MC_END();
8648 return VINF_SUCCESS;
8649
8650 case IEMMODE_64BIT:
8651 IEM_MC_BEGIN(1, 0);
8652 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8653 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8654 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8655 IEM_MC_ADVANCE_RIP();
8656 IEM_MC_END();
8657 return VINF_SUCCESS;
8658
8659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8660 }
8661}
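
/*
 * Illustrative sketch (hypothetical, not part of the build): the byte swap
 * performed by the iemAImpl_bswap_u32 worker called above.  The 16-bit form
 * is officially undefined, which is why the helper above passes a 32-bit
 * register reference and takes care not to clear the high dword.
 */
#if 0
static uint32_t iemSketchBSwapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif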
8662
8663
8664/** Opcode 0x0f 0xc8. */
8665FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8666{
8667 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8668 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8669 prefix, but REX.B appears to be the correct prefix. For a parallel
8670 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8671 IEMOP_HLP_MIN_486();
8672 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8673}
8674
8675
8676/** Opcode 0x0f 0xc9. */
8677FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8678{
8679 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8680 IEMOP_HLP_MIN_486();
8681 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8682}
8683
8684
8685/** Opcode 0x0f 0xca. */
8686FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8687{
8688 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8689 IEMOP_HLP_MIN_486();
8690 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8691}
8692
8693
8694/** Opcode 0x0f 0xcb. */
8695FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8696{
8697 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8698 IEMOP_HLP_MIN_486();
8699 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8700}
8701
8702
8703/** Opcode 0x0f 0xcc. */
8704FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8705{
8706 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8707 IEMOP_HLP_MIN_486();
8708 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8709}
8710
8711
8712/** Opcode 0x0f 0xcd. */
8713FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8714{
8715 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8716 IEMOP_HLP_MIN_486();
8717 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8718}
8719
8720
8721/** Opcode 0x0f 0xce. */
8722FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8723{
8724 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8725 IEMOP_HLP_MIN_486();
8726 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8727}
8728
8729
8730/** Opcode 0x0f 0xcf. */
8731FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8732{
8733 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8734 IEMOP_HLP_MIN_486();
8735 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8736}
8737
8738
8739/* Opcode 0x0f 0xd0 - invalid */
8740/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8741FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8742/* Opcode 0xf3 0x0f 0xd0 - invalid */
8743/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8744FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8745
8746/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8747FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8748/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8749FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8750/* Opcode 0xf3 0x0f 0xd1 - invalid */
8751/* Opcode 0xf2 0x0f 0xd1 - invalid */
8752
8753/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8754FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8755/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8756FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8757/* Opcode 0xf3 0x0f 0xd2 - invalid */
8758/* Opcode 0xf2 0x0f 0xd2 - invalid */
8759
8760/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8761FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8762/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8763FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8764/* Opcode 0xf3 0x0f 0xd3 - invalid */
8765/* Opcode 0xf2 0x0f 0xd3 - invalid */
8766
8767/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8768FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8769/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8770FNIEMOP_STUB(iemOp_paddq_Vx_W);
8771/* Opcode 0xf3 0x0f 0xd4 - invalid */
8772/* Opcode 0xf2 0x0f 0xd4 - invalid */
8773
8774/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8775FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8776/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8777FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8778/* Opcode 0xf3 0x0f 0xd5 - invalid */
8779/* Opcode 0xf2 0x0f 0xd5 - invalid */
8780
8781/* Opcode 0x0f 0xd6 - invalid */
8782
8783/**
8784 * @opcode 0xd6
8785 * @oppfx 0x66
8786 * @opcpuid sse2
8787 * @opgroup og_sse2_pcksclr_datamove
8788 * @opxcpttype none
8789 * @optest op1=-1 op2=2 -> op1=2
8790 * @optest op1=0 op2=-42 -> op1=-42
8791 */
8792FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8793{
8794 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8796 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8797 {
8798 /*
8799 * Register, register.
8800 */
8801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8802 IEM_MC_BEGIN(0, 2);
8803 IEM_MC_LOCAL(uint64_t, uSrc);
8804
8805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8806 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8807
8808 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8809 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8810
8811 IEM_MC_ADVANCE_RIP();
8812 IEM_MC_END();
8813 }
8814 else
8815 {
8816 /*
8817 * Memory, register.
8818 */
8819 IEM_MC_BEGIN(0, 2);
8820 IEM_MC_LOCAL(uint64_t, uSrc);
8821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8822
8823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8825 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8826 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8827
8828 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8829 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8830
8831 IEM_MC_ADVANCE_RIP();
8832 IEM_MC_END();
8833 }
8834 return VINF_SUCCESS;
8835}
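
/* A minimal sketch of what the register form above does, assuming the
   RTUINT128U layout with s.Lo/s.Hi qwords; illustrative only, the real work
   is done by the IEM_MC_* microcode statements. */
#if 0
static void movqWqVqRegSketch(RTUINT128U *puDst, PCRTUINT128U puSrc)
{
    puDst->s.Lo = puSrc->s.Lo;  /* copy the low qword */
    puDst->s.Hi = 0;            /* zero-extend the destination to 128 bits */
}
#endif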
8836
8837
8838/**
8839 * @opcode 0xd6
8840 * @opcodesub 11 mr/reg
8841 * @oppfx f3
8842 * @opcpuid sse2
8843 * @opgroup og_sse2_simdint_datamove
8844 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8845 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8846 */
8847FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8848{
8849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8851 {
8852 /*
8853 * Register, register.
8854 */
8855 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8857 IEM_MC_BEGIN(0, 1);
8858 IEM_MC_LOCAL(uint64_t, uSrc);
8859
8860 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8861 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8862
8863 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8864 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8865 IEM_MC_FPU_TO_MMX_MODE();
8866
8867 IEM_MC_ADVANCE_RIP();
8868 IEM_MC_END();
8869 return VINF_SUCCESS;
8870 }
8871
8872 /**
8873 * @opdone
8874 * @opmnemonic udf30fd6mem
8875 * @opcode 0xd6
8876 * @opcodesub !11 mr/reg
8877 * @oppfx f3
8878 * @opunused intel-modrm
8879 * @opcpuid sse
8880 * @optest ->
8881 */
8882 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8883}
8884
8885
8886/**
8887 * @opcode 0xd6
8888 * @opcodesub 11 mr/reg
8889 * @oppfx f2
8890 * @opcpuid sse2
8891 * @opgroup og_sse2_simdint_datamove
8892 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8893 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8894 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8895 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8896 * @optest op1=-42 op2=0xfedcba9876543210
8897 * -> op1=0xfedcba9876543210 ftw=0xff
8898 */
8899FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8900{
8901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8903 {
8904 /*
8905 * Register, register.
8906 */
8907 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8909 IEM_MC_BEGIN(0, 1);
8910 IEM_MC_LOCAL(uint64_t, uSrc);
8911
8912 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8913 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8914
8915 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8916 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8917 IEM_MC_FPU_TO_MMX_MODE();
8918
8919 IEM_MC_ADVANCE_RIP();
8920 IEM_MC_END();
8921 return VINF_SUCCESS;
8922 }
8923
8924 /**
8925 * @opdone
8926 * @opmnemonic udf20fd6mem
8927 * @opcode 0xd6
8928 * @opcodesub !11 mr/reg
8929 * @oppfx f2
8930 * @opunused intel-modrm
8931 * @opcpuid sse
8932 * @optest ->
8933 */
8934 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8935}
8936
8937/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8938FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8939{
8940 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8941 /** @todo testcase: Check that the instruction implicitly clears the high
8942 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8943 * and opcode modifications are made to work with the whole width (not
8944 * just 128 bits). See the reference sketch below. */
8945 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8946 /* Docs say register only. */
8947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8949 {
8950 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8951 IEM_MC_BEGIN(2, 0);
8952 IEM_MC_ARG(uint64_t *, pDst, 0);
8953 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8954 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8955 IEM_MC_PREPARE_FPU_USAGE();
8956 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8957 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8958 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8959 IEM_MC_ADVANCE_RIP();
8960 IEM_MC_END();
8961 return VINF_SUCCESS;
8962 }
8963 return IEMOP_RAISE_INVALID_OPCODE();
8964}
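
/* Reference sketch of the pmovmskb semantics performed by the assembly
   helper: collect the sign bit of each source byte into the low bits of the
   destination (hypothetical helper name, illustrative only). */
#if 0
static uint8_t pmovmskbU64Sketch(uint64_t uSrc)
{
    uint8_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if (uSrc & (UINT64_C(0x80) << (iByte * 8)))
            fMask |= (uint8_t)(1 << iByte);
    return fMask; /* the 128-bit variant does the same over 16 bytes */
}
#endif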
8965
8966/** Opcode 0x66 0x0f 0xd7 - */
8967FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8968{
8969 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8970 /** @todo testcase: Check that the instruction implicitly clears the high
8971 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8972 * and opcode modifications are made to work with the whole width (not
8973 * just 128 bits). */
8974 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8975 /* Docs say register only. */
8976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8978 {
8979 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8980 IEM_MC_BEGIN(2, 0);
8981 IEM_MC_ARG(uint64_t *, pDst, 0);
8982 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8983 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8984 IEM_MC_PREPARE_SSE_USAGE();
8985 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8986 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8987 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8988 IEM_MC_ADVANCE_RIP();
8989 IEM_MC_END();
8990 return VINF_SUCCESS;
8991 }
8992 return IEMOP_RAISE_INVALID_OPCODE();
8993}
8994
8995/* Opcode 0xf3 0x0f 0xd7 - invalid */
8996/* Opcode 0xf2 0x0f 0xd7 - invalid */
8997
8998
8999/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9000FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9001/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9002FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9003/* Opcode 0xf3 0x0f 0xd8 - invalid */
9004/* Opcode 0xf2 0x0f 0xd8 - invalid */
9005
9006/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9007FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9008/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9009FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9010/* Opcode 0xf3 0x0f 0xd9 - invalid */
9011/* Opcode 0xf2 0x0f 0xd9 - invalid */
9012
9013/** Opcode 0x0f 0xda - pminub Pq, Qq */
9014FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9015/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9016FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9017/* Opcode 0xf3 0x0f 0xda - invalid */
9018/* Opcode 0xf2 0x0f 0xda - invalid */
9019
9020/** Opcode 0x0f 0xdb - pand Pq, Qq */
9021FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9022/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9023FNIEMOP_STUB(iemOp_pand_Vx_W);
9024/* Opcode 0xf3 0x0f 0xdb - invalid */
9025/* Opcode 0xf2 0x0f 0xdb - invalid */
9026
9027/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9028FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9029/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9030FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9031/* Opcode 0xf3 0x0f 0xdc - invalid */
9032/* Opcode 0xf2 0x0f 0xdc - invalid */
9033
9034/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9035FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9036/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9037FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9038/* Opcode 0xf3 0x0f 0xdd - invalid */
9039/* Opcode 0xf2 0x0f 0xdd - invalid */
9040
9041/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9042FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9043/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9044FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9045/* Opcode 0xf3 0x0f 0xde - invalid */
9046/* Opcode 0xf2 0x0f 0xde - invalid */
9047
9048/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9049FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9050/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9051FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9052/* Opcode 0xf3 0x0f 0xdf - invalid */
9053/* Opcode 0xf2 0x0f 0xdf - invalid */
9054
9055/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9056FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9057/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9058FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9059/* Opcode 0xf3 0x0f 0xe0 - invalid */
9060/* Opcode 0xf2 0x0f 0xe0 - invalid */
9061
9062/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9063FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9064/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9065FNIEMOP_STUB(iemOp_psraw_Vx_W);
9066/* Opcode 0xf3 0x0f 0xe1 - invalid */
9067/* Opcode 0xf2 0x0f 0xe1 - invalid */
9068
9069/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9070FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9071/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9072FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9073/* Opcode 0xf3 0x0f 0xe2 - invalid */
9074/* Opcode 0xf2 0x0f 0xe2 - invalid */
9075
9076/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9077FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9078/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9079FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9080/* Opcode 0xf3 0x0f 0xe3 - invalid */
9081/* Opcode 0xf2 0x0f 0xe3 - invalid */
9082
9083/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9084FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9085/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9086FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9087/* Opcode 0xf3 0x0f 0xe4 - invalid */
9088/* Opcode 0xf2 0x0f 0xe4 - invalid */
9089
9090/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9091FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9092/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9093FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9094/* Opcode 0xf3 0x0f 0xe5 - invalid */
9095/* Opcode 0xf2 0x0f 0xe5 - invalid */
9096
9097/* Opcode 0x0f 0xe6 - invalid */
9098/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9099FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9100/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9101FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9102/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9103FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9104
9105
9106/**
9107 * @opcode 0xe7
9108 * @opcodesub !11 mr/reg
9109 * @oppfx none
9110 * @opcpuid sse
9111 * @opgroup og_sse1_cachect
9112 * @opxcpttype none
9113 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9114 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9115 */
9116FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9117{
9118 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9120 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9121 {
9122 /* Register, memory. */
9123 IEM_MC_BEGIN(0, 2);
9124 IEM_MC_LOCAL(uint64_t, uSrc);
9125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9126
9127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9129 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9130 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9131
9132 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9133 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9134 IEM_MC_FPU_TO_MMX_MODE();
9135
9136 IEM_MC_ADVANCE_RIP();
9137 IEM_MC_END();
9138 return VINF_SUCCESS;
9139 }
9140 /**
9141 * @opdone
9142 * @opmnemonic ud0fe7reg
9143 * @opcode 0xe7
9144 * @opcodesub 11 mr/reg
9145 * @oppfx none
9146 * @opunused immediate
9147 * @opcpuid sse
9148 * @optest ->
9149 */
9150 return IEMOP_RAISE_INVALID_OPCODE();
9151}
9152
9153/**
9154 * @opcode 0xe7
9155 * @opcodesub !11 mr/reg
9156 * @oppfx 0x66
9157 * @opcpuid sse2
9158 * @opgroup og_sse2_cachect
9159 * @opxcpttype 1
9160 * @optest op1=-1 op2=2 -> op1=2
9161 * @optest op1=0 op2=-42 -> op1=-42
9162 */
9163FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9164{
9165 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9167 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9168 {
9169 /* Register, memory. */
9170 IEM_MC_BEGIN(0, 2);
9171 IEM_MC_LOCAL(RTUINT128U, uSrc);
9172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9173
9174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9176 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9178
9179 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9180 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9181
9182 IEM_MC_ADVANCE_RIP();
9183 IEM_MC_END();
9184 return VINF_SUCCESS;
9185 }
9186
9187 /**
9188 * @opdone
9189 * @opmnemonic ud660fe7reg
9190 * @opcode 0xe7
9191 * @opcodesub 11 mr/reg
9192 * @oppfx 0x66
9193 * @opunused immediate
9194 * @opcpuid sse
9195 * @optest ->
9196 */
9197 return IEMOP_RAISE_INVALID_OPCODE();
9198}
9199
9200/* Opcode 0xf3 0x0f 0xe7 - invalid */
9201/* Opcode 0xf2 0x0f 0xe7 - invalid */
9202
9203
9204/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9205FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9206/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9207FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9208/* Opcode 0xf3 0x0f 0xe8 - invalid */
9209/* Opcode 0xf2 0x0f 0xe8 - invalid */
9210
9211/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9212FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9213/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9214FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9215/* Opcode 0xf3 0x0f 0xe9 - invalid */
9216/* Opcode 0xf2 0x0f 0xe9 - invalid */
9217
9218/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9219FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9220/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9221FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9222/* Opcode 0xf3 0x0f 0xea - invalid */
9223/* Opcode 0xf2 0x0f 0xea - invalid */
9224
9225/** Opcode 0x0f 0xeb - por Pq, Qq */
9226FNIEMOP_STUB(iemOp_por_Pq_Qq);
9227/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9228FNIEMOP_STUB(iemOp_por_Vx_W);
9229/* Opcode 0xf3 0x0f 0xeb - invalid */
9230/* Opcode 0xf2 0x0f 0xeb - invalid */
9231
9232/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9233FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9234/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9235FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9236/* Opcode 0xf3 0x0f 0xec - invalid */
9237/* Opcode 0xf2 0x0f 0xec - invalid */
9238
9239/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9240FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9241/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9242FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9243/* Opcode 0xf3 0x0f 0xed - invalid */
9244/* Opcode 0xf2 0x0f 0xed - invalid */
9245
9246/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9247FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9248/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9249FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9250/* Opcode 0xf3 0x0f 0xee - invalid */
9251/* Opcode 0xf2 0x0f 0xee - invalid */
9252
9253
9254/** Opcode 0x0f 0xef - pxor Pq, Qq */
9255FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9256{
9257 IEMOP_MNEMONIC(pxor, "pxor");
9258 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9259}
9260
9261/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9262FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9263{
9264 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9265 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9266}
9267
9268/* Opcode 0xf3 0x0f 0xef - invalid */
9269/* Opcode 0xf2 0x0f 0xef - invalid */
9270
9271/* Opcode 0x0f 0xf0 - invalid */
9272/* Opcode 0x66 0x0f 0xf0 - invalid */
9273/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9274FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9275
9276/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9277FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9278/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9279FNIEMOP_STUB(iemOp_psllw_Vx_W);
9280/* Opcode 0xf2 0x0f 0xf1 - invalid */
9281
9282/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9283FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9284/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9285FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9286/* Opcode 0xf2 0x0f 0xf2 - invalid */
9287
9288/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9289FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9290/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9291FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9292/* Opcode 0xf2 0x0f 0xf3 - invalid */
9293
9294/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9295FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9296/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9297FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9298/* Opcode 0xf2 0x0f 0xf4 - invalid */
9299
9300/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9301FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9302/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9303FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9304/* Opcode 0xf2 0x0f 0xf5 - invalid */
9305
9306/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9307FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9308/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9309FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9310/* Opcode 0xf2 0x0f 0xf6 - invalid */
9311
9312/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9313FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9314/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9315FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9316/* Opcode 0xf2 0x0f 0xf7 - invalid */
9317
9318/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9319FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9320/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9321FNIEMOP_STUB(iemOp_psubb_Vx_W);
9322/* Opcode 0xf2 0x0f 0xf8 - invalid */
9323
9324/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9325FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9326/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9327FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9328/* Opcode 0xf2 0x0f 0xf9 - invalid */
9329
9330/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9331FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9332/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9333FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9334/* Opcode 0xf2 0x0f 0xfa - invalid */
9335
9336/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9337FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9338/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9339FNIEMOP_STUB(iemOp_psubq_Vx_W);
9340/* Opcode 0xf2 0x0f 0xfb - invalid */
9341
9342/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9343FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9344/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9345FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9346/* Opcode 0xf2 0x0f 0xfc - invalid */
9347
9348/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9349FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9350/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9351FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9352/* Opcode 0xf2 0x0f 0xfd - invalid */
9353
9354/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9355FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9356/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9357FNIEMOP_STUB(iemOp_paddd_Vx_W);
9358/* Opcode 0xf2 0x0f 0xfe - invalid */
9359
9360
9361/** Opcode **** 0x0f 0xff - UD0 */
9362FNIEMOP_DEF(iemOp_ud0)
9363{
9364 IEMOP_MNEMONIC(ud0, "ud0");
9365 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9366 {
9367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9368#ifndef TST_IEM_CHECK_MC
9369 RTGCPTR GCPtrEff;
9370 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9371 if (rcStrict != VINF_SUCCESS)
9372 return rcStrict;
9373#endif
9374 IEMOP_HLP_DONE_DECODING();
9375 }
9376 return IEMOP_RAISE_INVALID_OPCODE();
9377}
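
/* Thus, on Intel CPUs UD0 consumes the ModRM byte (and any displacement bytes)
   before raising #UD, while other vendors raise #UD on the opcode alone -
   which is what the vendor check above models. */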
9378
9379
9380
9381/**
9382 * Two byte opcode map, first byte 0x0f.
9383 *
9384 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9385 * check if it needs updating as well when making changes.
9386 */
9387IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9388{
 9389 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9390 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9391 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9392 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9393 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9394 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9395 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9396 /* 0x06 */ IEMOP_X4(iemOp_clts),
9397 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9398 /* 0x08 */ IEMOP_X4(iemOp_invd),
9399 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9400 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9401 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9402 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9403 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9404 /* 0x0e */ IEMOP_X4(iemOp_femms),
9405 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9406
9407 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9408 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9409 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9410 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9411 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9412 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9413 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9414 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9415 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9416 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9417 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9418 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9419 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9420 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9421 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9422 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9423
9424 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9425 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9426 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9427 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9428 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9429 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9430 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9431 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9432 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9433 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9434 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9435 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9436 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9437 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9438 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9439 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9440
9441 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9442 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9443 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9444 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9445 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9446 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9447 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9448 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9449 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9450 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9451 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9452 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9453 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9454 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9455 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9456 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9457
9458 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9459 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9460 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9461 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9462 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9463 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9464 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9465 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9466 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9467 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9468 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9469 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9470 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9471 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9472 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9473 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9474
9475 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9476 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9477 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9478 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9479 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9480 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9481 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9482 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9483 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9484 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9485 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9486 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9487 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9488 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9489 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9490 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9491
9492 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9493 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9494 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9495 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9496 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9497 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9498 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9499 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9500 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9501 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9502 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9503 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9505 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9506 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9507 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9508
9509 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9510 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9511 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9512 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9513 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9514 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9515 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9516 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9517
9518 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9519 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9520 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9521 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9522 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9523 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9524 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9525 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9526
9527 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9528 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9529 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9530 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9531 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9532 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9533 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9534 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9535 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9536 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9537 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9538 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9539 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9540 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9541 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9542 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9543
9544 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9545 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9546 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9547 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9548 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9549 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9550 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9551 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9552 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9553 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9554 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9555 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9556 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9557 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9558 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9559 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9560
9561 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9562 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9563 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9564 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9565 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9566 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9567 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9568 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9569 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9570 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9571 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9572 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9573 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9574 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9575 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9576 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9577
9578 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9579 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9580 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9581 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9582 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9583 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9584 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9585 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9586 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9587 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9588 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9589 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9590 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9591 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9592 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9593 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9594
9595 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9596 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9597 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9598 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9599 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9600 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9601 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9602 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9603 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9604 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9605 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9606 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9607 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9608 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9609 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9610 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9611
9612 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9613 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9614 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9615 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9619 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9620 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9621 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9622 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9623 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9624 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9625 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9626 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9627 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9628
9629 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9630 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9631 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9632 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9633 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9634 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9635 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9636 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9637 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9638 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9639 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9640 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9641 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9642 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9643 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9644 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9645
9646 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9647 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9648 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9649 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9650 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9651 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9652 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9653 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9654 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9655 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9656 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9657 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9658 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9659 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9660 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9661 /* 0xff */ IEMOP_X4(iemOp_ud0),
9662};
9663AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
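
/* The map is indexed as opcode-byte * 4 + prefix column (0 = no prefix,
   1 = 066h, 2 = 0f3h, 3 = 0f2h), hence the 256 * 4 = 1024 entry assertion
   above. A dispatch would look roughly like this sketch (assuming idxPrefix
   holds the current column):
        FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */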
9664
9665/** @} */
9666