VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 73912

Last change on this file since 73912 was 73756, checked in by vboxsync, 6 years ago

VMM/IEM: Nested VMX: bugref:9180 VMCLEAR skeleton.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 73756 2018-08-18 05:13:26Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

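/*
 * Decoding pattern used by nearly every handler in this file: the ModR/M
 * byte (bRm) is tested with
 * (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) to pick the
 * register form (mod == 3) over the memory form, the reg field selects the
 * group member or operation, and rm (extended by REX.B where applicable)
 * names the operand register.  E.g. bRm == 0xc1 decodes as mod=3, reg=0, rm=1.
 */
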
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

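/*
 * Dispatch example for the table above: 0x0f 0x00 0xd8 has
 * reg = ((0xd8 >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) = 3, so it
 * is routed to g_apfnGroup6[3] (ltr); mod=3/rm=0 then makes AX (or R8W
 * with REX.B) the source selector register.
 */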

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};

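/*
 * Only the memory forms of group 7 fit a plain table; the register forms
 * (mod == 3) overload the rm field with individual instructions (vmcall,
 * monitor, xgetbv, ...), so iemOp_Grp7 below decodes them with a nested
 * switch instead.
 */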

/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

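/*
 * Register-form example: xgetbv is 0x0f 0x01 0xd0, i.e. mod=3, reg=2, rm=0,
 * which the switch above routes to iemOp_Grp7_xgetbv; the same reg value
 * with mod != 3 would have selected lgdt via the memory table instead.
 */
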
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

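/*
 * Note that the 32-bit and 64-bit operand sizes share the 64-bit path
 * above: the source is always a 16-bit selector, and the CIMPL worker
 * takes a 64-bit destination reference either way, so only the 16-bit
 * destination needs its own MC block.
 */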


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

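/*
 * Being an unaligned move, the memory form above fetches with the plain
 * IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant, so no
 * alignment check is applied to the effective address.
 */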

/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

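/*
 * The two movss forms above differ subtly: the register form merges the
 * low dword into the destination (IEM_MC_STORE_XREG_U32 leaves bits
 * 127:32 untouched), while the memory form zero-extends the loaded dword
 * to 128 bits via IEM_MC_STORE_XREG_U32_ZX_U128.
 */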

/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

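/*
 * Opcode 0x0f 0x12 is thus two instructions sharing one byte: with a
 * register operand it is movhlps (high qword of the source to the low
 * qword of the destination), with a memory operand it is movlps (a plain
 * low-qword load).  The corresponding stores live at opcode 0x0f 0x13.
 */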

/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

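/*
 * As the @optest above illustrates, movsldup duplicates the even (low)
 * source dwords: dddddddd:00000002:eeeeeeee:00000001 becomes
 * 00000002:00000002:00000001:00000001, i.e. elements 0 and 2 each fill a
 * pair of destination dwords.
 */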
1497
1498/**
1499 * @opcode 0x12
1500 * @oppfx 0xf2
1501 * @opcpuid sse3
1502 * @opgroup og_sse3_pcksclr_datamove
1503 * @opxcpttype 5
1504 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1505 * op1=0x22222222111111112222222211111111
1506 */
1507FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1508{
1509 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1512 {
1513 /*
1514 * Register, register.
1515 */
1516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1517 IEM_MC_BEGIN(2, 0);
1518 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1519 IEM_MC_ARG(uint64_t, uSrc, 1);
1520
1521 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1522 IEM_MC_PREPARE_SSE_USAGE();
1523
1524 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1525 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1526 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1527
1528 IEM_MC_ADVANCE_RIP();
1529 IEM_MC_END();
1530 }
1531 else
1532 {
1533 /*
1534 * Register, memory.
1535 */
1536 IEM_MC_BEGIN(2, 2);
1537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1538 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1539 IEM_MC_ARG(uint64_t, uSrc, 1);
1540
1541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1543 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1544 IEM_MC_PREPARE_SSE_USAGE();
1545
1546 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1547 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1548 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1549
1550 IEM_MC_ADVANCE_RIP();
1551 IEM_MC_END();
1552 }
1553 return VINF_SUCCESS;
1554}
1555
1556
1557/**
1558 * @opcode 0x13
1559 * @opcodesub !11 mr/reg
1560 * @oppfx none
1561 * @opcpuid sse
1562 * @opgroup og_sse_simdfp_datamove
1563 * @opxcpttype 5
1564 * @optest op1=1 op2=2 -> op1=2
1565 * @optest op1=0 op2=-42 -> op1=-42
1566 */
1567FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1568{
1569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1570 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1571 {
1572 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1573
1574 IEM_MC_BEGIN(0, 2);
1575 IEM_MC_LOCAL(uint64_t, uSrc);
1576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1577
1578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1580 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1581 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1582
1583 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1584 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1585
1586 IEM_MC_ADVANCE_RIP();
1587 IEM_MC_END();
1588 return VINF_SUCCESS;
1589 }
1590
1591 /**
1592 * @opdone
1593 * @opmnemonic ud0f13m3
1594 * @opcode 0x13
1595 * @opcodesub 11 mr/reg
1596 * @oppfx none
1597 * @opunused immediate
1598 * @opcpuid sse
1599 * @optest ->
1600 */
1601 return IEMOP_RAISE_INVALID_OPCODE();
1602}
1603
1604
1605/**
1606 * @opcode 0x13
1607 * @opcodesub !11 mr/reg
1608 * @oppfx 0x66
1609 * @opcpuid sse2
1610 * @opgroup og_sse2_pcksclr_datamove
1611 * @opxcpttype 5
1612 * @optest op1=1 op2=2 -> op1=2
1613 * @optest op1=0 op2=-42 -> op1=-42
1614 */
1615FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1616{
1617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1618 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1619 {
1620 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1621 IEM_MC_BEGIN(0, 2);
1622 IEM_MC_LOCAL(uint64_t, uSrc);
1623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1624
1625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1629
1630 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1631 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1632
1633 IEM_MC_ADVANCE_RIP();
1634 IEM_MC_END();
1635 return VINF_SUCCESS;
1636 }
1637
1638 /**
1639 * @opdone
1640 * @opmnemonic ud660f13m3
1641 * @opcode 0x13
1642 * @opcodesub 11 mr/reg
1643 * @oppfx 0x66
1644 * @opunused immediate
1645 * @opcpuid sse
1646 * @optest ->
1647 */
1648 return IEMOP_RAISE_INVALID_OPCODE();
1649}
1650
1651
1652/**
1653 * @opmnemonic udf30f13
1654 * @opcode 0x13
1655 * @oppfx 0xf3
1656 * @opunused intel-modrm
1657 * @opcpuid sse
1658 * @optest ->
1659 * @opdone
1660 */
1661
1662/**
1663 * @opmnemonic udf20f13
1664 * @opcode 0x13
1665 * @oppfx 0xf2
1666 * @opunused intel-modrm
1667 * @opcpuid sse
1668 * @optest ->
1669 * @opdone
1670 */
1671
1672/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1673FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1674/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1675FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1676
1677/**
1678 * @opdone
1679 * @opmnemonic udf30f14
1680 * @opcode 0x14
1681 * @oppfx 0xf3
1682 * @opunused intel-modrm
1683 * @opcpuid sse
1684 * @optest ->
1685 * @opdone
1686 */
1687
1688/**
1689 * @opmnemonic udf20f14
1690 * @opcode 0x14
1691 * @oppfx 0xf2
1692 * @opunused intel-modrm
1693 * @opcpuid sse
1694 * @optest ->
1695 * @opdone
1696 */
1697
1698/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1699FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1700/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1701FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1702/* Opcode 0xf3 0x0f 0x15 - invalid */
1703/* Opcode 0xf2 0x0f 0x15 - invalid */
1704
1705/**
1706 * @opdone
1707 * @opmnemonic udf30f15
1708 * @opcode 0x15
1709 * @oppfx 0xf3
1710 * @opunused intel-modrm
1711 * @opcpuid sse
1712 * @optest ->
1713 * @opdone
1714 */
1715
1716/**
1717 * @opmnemonic udf20f15
1718 * @opcode 0x15
1719 * @oppfx 0xf2
1720 * @opunused intel-modrm
1721 * @opcpuid sse
1722 * @optest ->
1723 * @opdone
1724 */
1725
1726FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1727{
1728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1730 {
1731 /**
1732 * @opcode 0x16
1733 * @opcodesub 11 mr/reg
1734 * @oppfx none
1735 * @opcpuid sse
1736 * @opgroup og_sse_simdfp_datamove
1737 * @opxcpttype 5
1738 * @optest op1=1 op2=2 -> op1=2
1739 * @optest op1=0 op2=-42 -> op1=-42
1740 */
1741 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1742
1743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1744 IEM_MC_BEGIN(0, 1);
1745 IEM_MC_LOCAL(uint64_t, uSrc);
1746
1747 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1748 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1749 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1750 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1751
1752 IEM_MC_ADVANCE_RIP();
1753 IEM_MC_END();
1754 }
1755 else
1756 {
1757 /**
1758 * @opdone
1759 * @opcode 0x16
1760 * @opcodesub !11 mr/reg
1761 * @oppfx none
1762 * @opcpuid sse
1763 * @opgroup og_sse_simdfp_datamove
1764 * @opxcpttype 5
1765 * @optest op1=1 op2=2 -> op1=2
1766 * @optest op1=0 op2=-42 -> op1=-42
1767 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1768 */
1769 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1770
1771 IEM_MC_BEGIN(0, 2);
1772 IEM_MC_LOCAL(uint64_t, uSrc);
1773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1774
1775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1777 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1779
1780 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1781 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1782
1783 IEM_MC_ADVANCE_RIP();
1784 IEM_MC_END();
1785 }
1786 return VINF_SUCCESS;
1787}
1788
1789
1790/**
1791 * @opcode 0x16
1792 * @opcodesub !11 mr/reg
1793 * @oppfx 0x66
1794 * @opcpuid sse2
1795 * @opgroup og_sse2_pcksclr_datamove
1796 * @opxcpttype 5
1797 * @optest op1=1 op2=2 -> op1=2
1798 * @optest op1=0 op2=-42 -> op1=-42
1799 */
1800FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1801{
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1804 {
1805 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1806 IEM_MC_BEGIN(0, 2);
1807 IEM_MC_LOCAL(uint64_t, uSrc);
1808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1809
1810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1812 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1813 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1814
1815 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1816 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1817
1818 IEM_MC_ADVANCE_RIP();
1819 IEM_MC_END();
1820 return VINF_SUCCESS;
1821 }
1822
1823 /**
1824 * @opdone
1825 * @opmnemonic ud660f16m3
1826 * @opcode 0x16
1827 * @opcodesub 11 mr/reg
1828 * @oppfx 0x66
1829 * @opunused immediate
1830 * @opcpuid sse
1831 * @optest ->
1832 */
1833 return IEMOP_RAISE_INVALID_OPCODE();
1834}
1835
1836
1837/**
1838 * @opcode 0x16
1839 * @oppfx 0xf3
1840 * @opcpuid sse3
1841 * @opgroup og_sse3_pcksclr_datamove
1842 * @opxcpttype 4
1843 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1844 * op1=0x00000002000000020000000100000001
1845 */
1846FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1847{
1848 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1851 {
1852 /*
1853 * Register, register.
1854 */
1855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1856 IEM_MC_BEGIN(2, 0);
1857 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1858 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1859
1860 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1861 IEM_MC_PREPARE_SSE_USAGE();
1862
1863 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1864 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1865 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1866
1867 IEM_MC_ADVANCE_RIP();
1868 IEM_MC_END();
1869 }
1870 else
1871 {
1872 /*
1873 * Register, memory.
1874 */
1875 IEM_MC_BEGIN(2, 2);
1876 IEM_MC_LOCAL(RTUINT128U, uSrc);
1877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1878 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1879 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1880
1881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1883 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1884 IEM_MC_PREPARE_SSE_USAGE();
1885
1886 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1887 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1888 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1889
1890 IEM_MC_ADVANCE_RIP();
1891 IEM_MC_END();
1892 }
1893 return VINF_SUCCESS;
1894}
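
/*
 * For reference, a plain-C sketch of the shuffle performed by
 * iemAImpl_movshdup (hypothetical helper; dwords indexed low to high):
 *
 *      static void movshdupSketch(uint32_t aDst[4], uint32_t const aSrc[4])
 *      {
 *          aDst[0] = aSrc[1];  // duplicate the odd (high) dword of the low qword
 *          aDst[1] = aSrc[1];
 *          aDst[2] = aSrc[3];  // duplicate the odd (high) dword of the high qword
 *          aDst[3] = aSrc[3];
 *      }
 *
 * which matches the @optest above: dwords 2:d:1:e yield 2:2:1:1.
 */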
1895
1896/**
1897 * @opdone
1898 * @opmnemonic udf30f16
1899 * @opcode 0x16
1900 * @oppfx 0xf2
1901 * @opunused intel-modrm
1902 * @opcpuid sse
1903 * @optest ->
1904 * @opdone
1905 */
1906
1907
1908/**
1909 * @opcode 0x17
1910 * @opcodesub !11 mr/reg
1911 * @oppfx none
1912 * @opcpuid sse
1913 * @opgroup og_sse_simdfp_datamove
1914 * @opxcpttype 5
1915 * @optest op1=1 op2=2 -> op1=2
1916 * @optest op1=0 op2=-42 -> op1=-42
1917 */
1918FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1919{
1920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1921 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1922 {
1923 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1924
1925 IEM_MC_BEGIN(0, 2);
1926 IEM_MC_LOCAL(uint64_t, uSrc);
1927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1928
1929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1931 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1933
1934 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1935 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1936
1937 IEM_MC_ADVANCE_RIP();
1938 IEM_MC_END();
1939 return VINF_SUCCESS;
1940 }
1941
1942 /**
1943 * @opdone
1944 * @opmnemonic ud0f17m3
1945 * @opcode 0x17
1946 * @opcodesub 11 mr/reg
1947 * @oppfx none
1948 * @opunused immediate
1949 * @opcpuid sse
1950 * @optest ->
1951 */
1952 return IEMOP_RAISE_INVALID_OPCODE();
1953}
1954
1955
1956/**
1957 * @opcode 0x17
1958 * @opcodesub !11 mr/reg
1959 * @oppfx 0x66
1960 * @opcpuid sse2
1961 * @opgroup og_sse2_pcksclr_datamove
1962 * @opxcpttype 5
1963 * @optest op1=1 op2=2 -> op1=2
1964 * @optest op1=0 op2=-42 -> op1=-42
1965 */
1966FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1967{
1968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1969 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1970 {
1971 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1972
1973 IEM_MC_BEGIN(0, 2);
1974 IEM_MC_LOCAL(uint64_t, uSrc);
1975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1976
1977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1979 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1980 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1981
1982 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1983 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1984
1985 IEM_MC_ADVANCE_RIP();
1986 IEM_MC_END();
1987 return VINF_SUCCESS;
1988 }
1989
1990 /**
1991 * @opdone
1992 * @opmnemonic ud660f17m3
1993 * @opcode 0x17
1994 * @opcodesub 11 mr/reg
1995 * @oppfx 0x66
1996 * @opunused immediate
1997 * @opcpuid sse
1998 * @optest ->
1999 */
2000 return IEMOP_RAISE_INVALID_OPCODE();
2001}
2002
2003
2004/**
2005 * @opdone
2006 * @opmnemonic udf30f17
2007 * @opcode 0x17
2008 * @oppfx 0xf3
2009 * @opunused intel-modrm
2010 * @opcpuid sse
2011 * @optest ->
2012 * @opdone
2013 */
2014
2015/**
2016 * @opmnemonic udf20f17
2017 * @opcode 0x17
2018 * @oppfx 0xf2
2019 * @opunused intel-modrm
2020 * @opcpuid sse
2021 * @optest ->
2022 * @opdone
2023 */
2024
2025
2026/** Opcode 0x0f 0x18. */
2027FNIEMOP_DEF(iemOp_prefetch_Grp16)
2028{
2029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2030 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2031 {
2032 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2033 {
2034 case 4: /* Aliased to /0 for the time being according to AMD. */
2035 case 5: /* Aliased to /0 for the time being according to AMD. */
2036 case 6: /* Aliased to /0 for the time being according to AMD. */
2037 case 7: /* Aliased to /0 for the time being according to AMD. */
2038 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2039 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2040 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2041 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2043 }
2044
2045 IEM_MC_BEGIN(0, 1);
2046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2049 /* Currently a NOP. */
2050 NOREF(GCPtrEffSrc);
2051 IEM_MC_ADVANCE_RIP();
2052 IEM_MC_END();
2053 return VINF_SUCCESS;
2054 }
2055
2056 return IEMOP_RAISE_INVALID_OPCODE();
2057}
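
/*
 * Guest code usually reaches this decoder path via compiler intrinsics; a
 * sketch of what such guest code looks like (standard xmmintrin.h interface,
 * nothing defined in this file):
 *
 *      #include <xmmintrin.h>
 *      void warm(void const *p)
 *      {
 *          _mm_prefetch((char const *)p, _MM_HINT_T0);   // 0f 18 /1
 *      }
 *
 * Since prefetching is only a hint with no architectural effect, emulating
 * it as a NOP (as above) is fine.
 */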
2058
2059
2060/** Opcode 0x0f 0x19..0x1f. */
2061FNIEMOP_DEF(iemOp_nop_Ev)
2062{
2063 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2065 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2066 {
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 IEM_MC_BEGIN(0, 0);
2069 IEM_MC_ADVANCE_RIP();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 IEM_MC_BEGIN(0, 1);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2078 /* Currently a NOP. */
2079 NOREF(GCPtrEffSrc);
2080 IEM_MC_ADVANCE_RIP();
2081 IEM_MC_END();
2082 }
2083 return VINF_SUCCESS;
2084}
2085
2086
2087/** Opcode 0x0f 0x20. */
2088FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2089{
2090 /* mod is ignored, as are operand-size overrides. */
2091 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2092 IEMOP_HLP_MIN_386();
2093 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2094 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2095 else
2096 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2097
2098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2099 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2100 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2101 {
2102 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2103 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2104 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see testcase. */
2105 iCrReg |= 8;
2106 }
2107 switch (iCrReg)
2108 {
2109 case 0: case 2: case 3: case 4: case 8:
2110 break;
2111 default:
2112 return IEMOP_RAISE_INVALID_OPCODE();
2113 }
2114 IEMOP_HLP_DONE_DECODING();
2115
2116 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2117}
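
/*
 * Encoding sketch for the LOCK/CR8 special case checked above: on CPUs
 * reporting the feature (fMovCr8In32Bit), a 32-bit guest can access CR8 as
 *
 *      f0 0f 20 c0             lock mov eax, cr0   ; actually reads CR8
 *
 * i.e. the LOCK prefix serves as an extra control-register index bit, which
 * is exactly the iCrReg |= 8 above.
 */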
2118
2119
2120/** Opcode 0x0f 0x21. */
2121FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2122{
2123 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2124 IEMOP_HLP_MIN_386();
2125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2127 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2128 return IEMOP_RAISE_INVALID_OPCODE();
2129 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2130 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2131 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2132}
2133
2134
2135/** Opcode 0x0f 0x22. */
2136FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2137{
2138 /* mod is ignored, as are operand-size overrides. */
2139 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2140 IEMOP_HLP_MIN_386();
2141 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2142 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2143 else
2144 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2145
2146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2147 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2148 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2149 {
2150 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2151 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2152 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2153 iCrReg |= 8;
2154 }
2155 switch (iCrReg)
2156 {
2157 case 0: case 2: case 3: case 4: case 8:
2158 break;
2159 default:
2160 return IEMOP_RAISE_INVALID_OPCODE();
2161 }
2162 IEMOP_HLP_DONE_DECODING();
2163
2164 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2165}
2166
2167
2168/** Opcode 0x0f 0x23. */
2169FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2170{
2171 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2172 IEMOP_HLP_MIN_386();
2173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2175 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2176 return IEMOP_RAISE_INVALID_OPCODE();
2177 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2178 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2179 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2180}
2181
2182
2183/** Opcode 0x0f 0x24. */
2184FNIEMOP_DEF(iemOp_mov_Rd_Td)
2185{
2186 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2187 /** @todo works on 386 and 486. */
2188 /* The RM byte is not considered, see testcase. */
2189 return IEMOP_RAISE_INVALID_OPCODE();
2190}
2191
2192
2193/** Opcode 0x0f 0x26. */
2194FNIEMOP_DEF(iemOp_mov_Td_Rd)
2195{
2196 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2197 /** @todo works on 386 and 486. */
2198 /* The RM byte is not considered, see testcase. */
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200}
2201
2202
2203/**
2204 * @opcode 0x28
2205 * @oppfx none
2206 * @opcpuid sse
2207 * @opgroup og_sse_simdfp_datamove
2208 * @opxcpttype 1
2209 * @optest op1=1 op2=2 -> op1=2
2210 * @optest op1=0 op2=-42 -> op1=-42
2211 */
2212FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2213{
2214 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2217 {
2218 /*
2219 * Register, register.
2220 */
2221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2222 IEM_MC_BEGIN(0, 0);
2223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2225 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2226 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2227 IEM_MC_ADVANCE_RIP();
2228 IEM_MC_END();
2229 }
2230 else
2231 {
2232 /*
2233 * Register, memory.
2234 */
2235 IEM_MC_BEGIN(0, 2);
2236 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243
2244 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2245 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2246
2247 IEM_MC_ADVANCE_RIP();
2248 IEM_MC_END();
2249 }
2250 return VINF_SUCCESS;
2251}
2252
2253/**
2254 * @opcode 0x28
2255 * @oppfx 66
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 1
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
2262FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2263{
2264 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2267 {
2268 /*
2269 * Register, register.
2270 */
2271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2272 IEM_MC_BEGIN(0, 0);
2273 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2274 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2275 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2276 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2277 IEM_MC_ADVANCE_RIP();
2278 IEM_MC_END();
2279 }
2280 else
2281 {
2282 /*
2283 * Register, memory.
2284 */
2285 IEM_MC_BEGIN(0, 2);
2286 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2288
2289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2291 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2292 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2293
2294 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2295 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2296
2297 IEM_MC_ADVANCE_RIP();
2298 IEM_MC_END();
2299 }
2300 return VINF_SUCCESS;
2301}
2302
2303/* Opcode 0xf3 0x0f 0x28 - invalid */
2304/* Opcode 0xf2 0x0f 0x28 - invalid */
2305
2306/**
2307 * @opcode 0x29
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 1
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
2315FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2316{
2317 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2320 {
2321 /*
2322 * Register, register.
2323 */
2324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2325 IEM_MC_BEGIN(0, 0);
2326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2328 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2329 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2330 IEM_MC_ADVANCE_RIP();
2331 IEM_MC_END();
2332 }
2333 else
2334 {
2335 /*
2336 * Memory, register.
2337 */
2338 IEM_MC_BEGIN(0, 2);
2339 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2341
2342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2346
2347 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2348 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2349
2350 IEM_MC_ADVANCE_RIP();
2351 IEM_MC_END();
2352 }
2353 return VINF_SUCCESS;
2354}
2355
2356/**
2357 * @opcode 0x29
2358 * @oppfx 66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 1
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2366{
2367 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2370 {
2371 /*
2372 * Register, register.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 0);
2376 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2379 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2380 IEM_MC_ADVANCE_RIP();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * Memory, register.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2396
2397 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2398 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP();
2401 IEM_MC_END();
2402 }
2403 return VINF_SUCCESS;
2404}
2405
2406/* Opcode 0xf3 0x0f 0x29 - invalid */
2407/* Opcode 0xf2 0x0f 0x29 - invalid */
2408
2409
2410/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2411FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2412/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2413FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2414/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2415FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2416/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2417FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2418
2419
2420/**
2421 * @opcode 0x2b
2422 * @opcodesub !11 mr/reg
2423 * @oppfx none
2424 * @opcpuid sse
2425 * @opgroup og_sse1_cachect
2426 * @opxcpttype 1
2427 * @optest op1=1 op2=2 -> op1=2
2428 * @optest op1=0 op2=-42 -> op1=-42
2429 */
2430FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2431{
2432 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2434 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2435 {
2436 /*
2437 * Memory, register.
2438 */
2439 IEM_MC_BEGIN(0, 2);
2440 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2442
2443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2445 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2446 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2447
2448 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2449 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2450
2451 IEM_MC_ADVANCE_RIP();
2452 IEM_MC_END();
2453 }
2454 /* The register, register encoding is invalid. */
2455 else
2456 return IEMOP_RAISE_INVALID_OPCODE();
2457 return VINF_SUCCESS;
2458}
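
/*
 * The invalid register form matches the intrinsics view of this instruction:
 * guests normally emit it via _mm_stream_ps, which only takes a memory
 * destination (sketch of guest code, not part of this file):
 *
 *      #include <xmmintrin.h>
 *      void storeNonTemporal(float *p16Aligned, __m128 v)
 *      {
 *          _mm_stream_ps(p16Aligned, v);   // 0f 2b, memory form only
 *      }
 */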
2459
2460/**
2461 * @opcode 0x2b
2462 * @opcodesub !11 mr/reg
2463 * @oppfx 0x66
2464 * @opcpuid sse2
2465 * @opgroup og_sse2_cachect
2466 * @opxcpttype 1
2467 * @optest op1=1 op2=2 -> op1=2
2468 * @optest op1=0 op2=-42 -> op1=-42
2469 */
2470FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2471{
2472 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2474 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2475 {
2476 /*
2477 * Memory, register.
2478 */
2479 IEM_MC_BEGIN(0, 2);
2480 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2482
2483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2487
2488 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2489 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2490
2491 IEM_MC_ADVANCE_RIP();
2492 IEM_MC_END();
2493 }
2494 /* The register, register encoding is invalid. */
2495 else
2496 return IEMOP_RAISE_INVALID_OPCODE();
2497 return VINF_SUCCESS;
2498}
2499/* Opcode 0xf3 0x0f 0x2b - invalid */
2500/* Opcode 0xf2 0x0f 0x2b - invalid */
2501
2502
2503/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2504FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2505/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2506FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2507/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2508FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2509/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2510FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2511
2512/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2513FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2514/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2515FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2516/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2517FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2518/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2519FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2520
2521/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2522FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2523/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2524FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2525/* Opcode 0xf3 0x0f 0x2e - invalid */
2526/* Opcode 0xf2 0x0f 0x2e - invalid */
2527
2528/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2529FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2530/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2531FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2532/* Opcode 0xf3 0x0f 0x2f - invalid */
2533/* Opcode 0xf2 0x0f 0x2f - invalid */
2534
2535/** Opcode 0x0f 0x30. */
2536FNIEMOP_DEF(iemOp_wrmsr)
2537{
2538 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2541}
2542
2543
2544/** Opcode 0x0f 0x31. */
2545FNIEMOP_DEF(iemOp_rdtsc)
2546{
2547 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2550}
2551
2552
2553/** Opcode 0x0f 0x32. */
2554FNIEMOP_DEF(iemOp_rdmsr)
2555{
2556 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2559}
2560
2561
2562/** Opcode 0x0f 0x33. */
2563FNIEMOP_DEF(iemOp_rdpmc)
2564{
2565 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2567 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2568}
2569
2570
2571/** Opcode 0x0f 0x34. */
2572FNIEMOP_STUB(iemOp_sysenter);
2573/** Opcode 0x0f 0x35. */
2574FNIEMOP_STUB(iemOp_sysexit);
2575/** Opcode 0x0f 0x37. */
2576FNIEMOP_STUB(iemOp_getsec);
2577
2578
2579/** Opcode 0x0f 0x38. */
2580FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2581{
2582#ifdef IEM_WITH_THREE_0F_38
2583 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2584 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2585#else
2586 IEMOP_BITCH_ABOUT_STUB();
2587 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2588#endif
2589}
2590
2591
2592/** Opcode 0x0f 0x3a. */
2593FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2594{
2595#ifdef IEM_WITH_THREE_0F_3A
2596 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2597 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2598#else
2599 IEMOP_BITCH_ABOUT_STUB();
2600 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2601#endif
2602}
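
/*
 * Table layout sketch for the two escape dispatchers above: four function
 * pointers per opcode byte, selected by the last repeat/size prefix seen
 * (idxPrefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), i.e.
 *
 *      pfn = g_apfnThreeByte0f38[(uintptr_t)b * 4 + idxPrefix];
 *
 * so the 256-entry opcode space needs a 1024-entry table.
 */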
2603
2604
2605/**
2606 * Implements a conditional move.
2607 *
2608 * Wish there was an obvious way to do this where we could share and reduce
2609 * code bloat.
2610 *
2611 * @param a_Cnd The conditional "microcode" operation.
2612 */
2613#define CMOV_X(a_Cnd) \
2614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2615 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2616 { \
2617 switch (pVCpu->iem.s.enmEffOpSize) \
2618 { \
2619 case IEMMODE_16BIT: \
2620 IEM_MC_BEGIN(0, 1); \
2621 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2622 a_Cnd { \
2623 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2624 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2625 } IEM_MC_ENDIF(); \
2626 IEM_MC_ADVANCE_RIP(); \
2627 IEM_MC_END(); \
2628 return VINF_SUCCESS; \
2629 \
2630 case IEMMODE_32BIT: \
2631 IEM_MC_BEGIN(0, 1); \
2632 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2633 a_Cnd { \
2634 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2635 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2636 } IEM_MC_ELSE() { \
2637 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2638 } IEM_MC_ENDIF(); \
2639 IEM_MC_ADVANCE_RIP(); \
2640 IEM_MC_END(); \
2641 return VINF_SUCCESS; \
2642 \
2643 case IEMMODE_64BIT: \
2644 IEM_MC_BEGIN(0, 1); \
2645 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2646 a_Cnd { \
2647 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2648 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2649 } IEM_MC_ENDIF(); \
2650 IEM_MC_ADVANCE_RIP(); \
2651 IEM_MC_END(); \
2652 return VINF_SUCCESS; \
2653 \
2654 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2655 } \
2656 } \
2657 else \
2658 { \
2659 switch (pVCpu->iem.s.enmEffOpSize) \
2660 { \
2661 case IEMMODE_16BIT: \
2662 IEM_MC_BEGIN(0, 2); \
2663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2664 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2666 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2667 a_Cnd { \
2668 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2669 } IEM_MC_ENDIF(); \
2670 IEM_MC_ADVANCE_RIP(); \
2671 IEM_MC_END(); \
2672 return VINF_SUCCESS; \
2673 \
2674 case IEMMODE_32BIT: \
2675 IEM_MC_BEGIN(0, 2); \
2676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2677 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2679 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2680 a_Cnd { \
2681 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2682 } IEM_MC_ELSE() { \
2683 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2684 } IEM_MC_ENDIF(); \
2685 IEM_MC_ADVANCE_RIP(); \
2686 IEM_MC_END(); \
2687 return VINF_SUCCESS; \
2688 \
2689 case IEMMODE_64BIT: \
2690 IEM_MC_BEGIN(0, 2); \
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2692 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2694 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2695 a_Cnd { \
2696 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2697 } IEM_MC_ENDIF(); \
2698 IEM_MC_ADVANCE_RIP(); \
2699 IEM_MC_END(); \
2700 return VINF_SUCCESS; \
2701 \
2702 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2703 } \
2704 } do {} while (0)
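
/*
 * What one CMOV_X instantiation boils down to, as a plain-C sketch (32-bit
 * register form shown; the helper name is made up):
 *
 *      static void cmovSketch(uint64_t *puDst, uint32_t uSrc, int fCond)
 *      {
 *          if (fCond)
 *              *puDst = uSrc;                      // condition met: move (zero extended)
 *          else
 *              *puDst &= UINT32_C(0xffffffff);     // 64-bit mode still zeroes bits 63:32
 *      }
 *
 * The unconditional high-dword clearing is why the 32-bit cases above need
 * an IEM_MC_ELSE() branch while the 16-bit and 64-bit cases do not.
 */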
2705
2706
2707
2708/** Opcode 0x0f 0x40. */
2709FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2710{
2711 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2712 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2713}
2714
2715
2716/** Opcode 0x0f 0x41. */
2717FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2718{
2719 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2720 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2721}
2722
2723
2724/** Opcode 0x0f 0x42. */
2725FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2726{
2727 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2728 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2729}
2730
2731
2732/** Opcode 0x0f 0x43. */
2733FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2734{
2735 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2736 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2737}
2738
2739
2740/** Opcode 0x0f 0x44. */
2741FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2742{
2743 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2744 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2745}
2746
2747
2748/** Opcode 0x0f 0x45. */
2749FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2750{
2751 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2752 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2753}
2754
2755
2756/** Opcode 0x0f 0x46. */
2757FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2758{
2759 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2760 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2761}
2762
2763
2764/** Opcode 0x0f 0x47. */
2765FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2766{
2767 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2768 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2769}
2770
2771
2772/** Opcode 0x0f 0x48. */
2773FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2774{
2775 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2776 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2777}
2778
2779
2780/** Opcode 0x0f 0x49. */
2781FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2782{
2783 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2784 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2785}
2786
2787
2788/** Opcode 0x0f 0x4a. */
2789FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2790{
2791 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2792 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2793}
2794
2795
2796/** Opcode 0x0f 0x4b. */
2797FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2798{
2799 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2800 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2801}
2802
2803
2804/** Opcode 0x0f 0x4c. */
2805FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2806{
2807 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2808 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2809}
2810
2811
2812/** Opcode 0x0f 0x4d. */
2813FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2814{
2815 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2816 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2817}
2818
2819
2820/** Opcode 0x0f 0x4e. */
2821FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2822{
2823 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2824 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2825}
2826
2827
2828/** Opcode 0x0f 0x4f. */
2829FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2830{
2831 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2832 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2833}
2834
2835#undef CMOV_X
2836
2837/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2838FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2839/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2840FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2841/* Opcode 0xf3 0x0f 0x50 - invalid */
2842/* Opcode 0xf2 0x0f 0x50 - invalid */
2843
2844/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2845FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2846/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2847FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2848/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2849FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2850/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2851FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2852
2853/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2854FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2855/* Opcode 0x66 0x0f 0x52 - invalid */
2856/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2857FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2858/* Opcode 0xf2 0x0f 0x52 - invalid */
2859
2860/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2861FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2862/* Opcode 0x66 0x0f 0x53 - invalid */
2863/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2864FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2865/* Opcode 0xf2 0x0f 0x53 - invalid */
2866
2867/** Opcode 0x0f 0x54 - andps Vps, Wps */
2868FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2869/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2870FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2871/* Opcode 0xf3 0x0f 0x54 - invalid */
2872/* Opcode 0xf2 0x0f 0x54 - invalid */
2873
2874/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2875FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2876/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2877FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2878/* Opcode 0xf3 0x0f 0x55 - invalid */
2879/* Opcode 0xf2 0x0f 0x55 - invalid */
2880
2881/** Opcode 0x0f 0x56 - orps Vps, Wps */
2882FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2883/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2884FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2885/* Opcode 0xf3 0x0f 0x56 - invalid */
2886/* Opcode 0xf2 0x0f 0x56 - invalid */
2887
2888/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2889FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2890/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2891FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2892/* Opcode 0xf3 0x0f 0x57 - invalid */
2893/* Opcode 0xf2 0x0f 0x57 - invalid */
2894
2895/** Opcode 0x0f 0x58 - addps Vps, Wps */
2896FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2897/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2898FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2899/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2900FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2901/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2902FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2903
2904/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2905FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2906/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2907FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2908/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2909FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2910/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2911FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2912
2913/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2914FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2915/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2916FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2917/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2918FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2919/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2920FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2921
2922/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2923FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2924/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2925FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2926/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2927FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2928/* Opcode 0xf2 0x0f 0x5b - invalid */
2929
2930/** Opcode 0x0f 0x5c - subps Vps, Wps */
2931FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2932/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2933FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2934/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2935FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2936/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2937FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2938
2939/** Opcode 0x0f 0x5d - minps Vps, Wps */
2940FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2941/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2942FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2943/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2944FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2945/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2946FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2947
2948/** Opcode 0x0f 0x5e - divps Vps, Wps */
2949FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2950/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2951FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2952/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2953FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2954/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2955FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2956
2957/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2958FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2959/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2960FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2961/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2962FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2963/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2964FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2965
2966/**
2967 * Common worker for SSE2 instructions on the forms:
2968 *     pxxxx xmm1, xmm2/mem128
2969 *
2970 * The 2nd operand is the first half of a register, which in the memory case
2971 * means a 128-bit aligned 64-bit memory access (only the low qword of the
2972 * source is used).
2973 *
2974 * Exceptions type 4.
2975 */
2976FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2977{
2978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2979 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2980 {
2981 /*
2982 * Register, register.
2983 */
2984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2985 IEM_MC_BEGIN(2, 0);
2986 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2987 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2988 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2989 IEM_MC_PREPARE_SSE_USAGE();
2990 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2991 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2992 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2993 IEM_MC_ADVANCE_RIP();
2994 IEM_MC_END();
2995 }
2996 else
2997 {
2998 /*
2999 * Register, memory.
3000 */
3001 IEM_MC_BEGIN(2, 2);
3002 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3003 IEM_MC_LOCAL(uint64_t, uSrc);
3004 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3006
3007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3009 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3010 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3011
3012 IEM_MC_PREPARE_SSE_USAGE();
3013 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3014 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3015
3016 IEM_MC_ADVANCE_RIP();
3017 IEM_MC_END();
3018 }
3019 return VINF_SUCCESS;
3020}
3021
3022
3023/**
3024 * Common worker for MMX instructions on the forms:
3025 *     pxxxx mm1, mm2/mem32
3026 *
3027 * The 2nd operand is the first half of a register, which in the memory case
3028 * means a 32-bit memory access.
3029 *
3030 * Exceptions type 4.
3031 */
3032FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3034{
3035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3036 if (!pImpl->pfnU64)
3037 return IEMOP_RAISE_INVALID_OPCODE();
3038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3039 {
3040 /*
3041 * Register, register.
3042 */
3043 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3044 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEM_MC_BEGIN(2, 0);
3047 IEM_MC_ARG(uint64_t *, pDst, 0);
3048 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3049 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3050 IEM_MC_PREPARE_FPU_USAGE();
3051 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3052 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3053 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3054 IEM_MC_ADVANCE_RIP();
3055 IEM_MC_END();
3056 }
3057 else
3058 {
3059 /*
3060 * Register, memory.
3061 */
3062 IEM_MC_BEGIN(2, 2);
3063 IEM_MC_ARG(uint64_t *, pDst, 0);
3064 IEM_MC_LOCAL(uint32_t, uSrc);
3065 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3067
3068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3070 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3071 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3072
3073 IEM_MC_PREPARE_FPU_USAGE();
3074 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3075 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3076
3077 IEM_MC_ADVANCE_RIP();
3078 IEM_MC_END();
3079 }
3080 return VINF_SUCCESS;
3081}
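
/*
 * A plain-C sketch of the "LowLow" interleave implemented by the two workers
 * above (punpcklbw at MMX width shown; hypothetical helper, byte arrays
 * indexed low to high):
 *
 *      static void punpcklbwSketch(uint8_t aDst[8], uint8_t const aSrc[8])
 *      {
 *          for (int i = 7; i >= 0; i--)    // descending, so in-place is safe
 *              aDst[i] = (i & 1) ? aSrc[i / 2] : aDst[i / 2];
 *      }
 *
 * Only the low halves of both operands are consumed, which is why the MMX
 * memory form reads just 32 bits and the SSE form just 64.
 */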
3082
3083
3084/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3085FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3086{
3087 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3088 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3089}
3090
3091/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3092FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3093{
3094 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3095 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3096}
3097
3098/* Opcode 0xf3 0x0f 0x60 - invalid */
3099
3100
3101/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3102FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3103{
3104 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3105 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3106}
3107
3108/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3109FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3110{
3111 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3112 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3113}
3114
3115/* Opcode 0xf3 0x0f 0x61 - invalid */
3116
3117
3118/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3119FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3120{
3121 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3122 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3123}
3124
3125/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3126FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3127{
3128 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3129 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3130}
3131
3132/* Opcode 0xf3 0x0f 0x62 - invalid */
3133
3134
3135
3136/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3137FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3138/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3139FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3140/* Opcode 0xf3 0x0f 0x63 - invalid */
3141
3142/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3143FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3144/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3145FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3146/* Opcode 0xf3 0x0f 0x64 - invalid */
3147
3148/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3149FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3150/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3151FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3152/* Opcode 0xf3 0x0f 0x65 - invalid */
3153
3154/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3155FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3156/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3157FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3158/* Opcode 0xf3 0x0f 0x66 - invalid */
3159
3160/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3161FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3162/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3163FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3164/* Opcode 0xf3 0x0f 0x67 - invalid */
3165
3166
3167/**
3168 * Common worker for MMX instructions on the form:
3169 * pxxxx mm1, mm2/mem64
3170 *
3171 * The 2nd operand is the second half of a register, which in the memory case
3172 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3173 * where it may read the full 128 bits or only the upper 64 bits.
3174 *
3175 * Exceptions type 4.
3176 */
3177FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3178{
3179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3181 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3182 {
3183 /*
3184 * Register, register.
3185 */
3186 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3187 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3189 IEM_MC_BEGIN(2, 0);
3190 IEM_MC_ARG(uint64_t *, pDst, 0);
3191 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3192 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3193 IEM_MC_PREPARE_FPU_USAGE();
3194 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3195 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3196 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3197 IEM_MC_ADVANCE_RIP();
3198 IEM_MC_END();
3199 }
3200 else
3201 {
3202 /*
3203 * Register, memory.
3204 */
3205 IEM_MC_BEGIN(2, 2);
3206 IEM_MC_ARG(uint64_t *, pDst, 0);
3207 IEM_MC_LOCAL(uint64_t, uSrc);
3208 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3210
3211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3213 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3214 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3215
3216 IEM_MC_PREPARE_FPU_USAGE();
3217 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3218 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3219
3220 IEM_MC_ADVANCE_RIP();
3221 IEM_MC_END();
3222 }
3223 return VINF_SUCCESS;
3224}
3225
3226
3227/**
3228 * Common worker for SSE2 instructions on the form:
3229 * pxxxx xmm1, xmm2/mem128
3230 *
3231 * The 2nd operand is the second half of a register, which in the memory case
3232 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3233 * where it may read the full 128 bits or only the upper 64 bits.
3234 *
3235 * Exceptions type 4.
3236 */
3237FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3238{
3239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3241 {
3242 /*
3243 * Register, register.
3244 */
3245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3246 IEM_MC_BEGIN(2, 0);
3247 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3248 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3249 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3250 IEM_MC_PREPARE_SSE_USAGE();
3251 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3252 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3253 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3254 IEM_MC_ADVANCE_RIP();
3255 IEM_MC_END();
3256 }
3257 else
3258 {
3259 /*
3260 * Register, memory.
3261 */
3262 IEM_MC_BEGIN(2, 2);
3263 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3264 IEM_MC_LOCAL(RTUINT128U, uSrc);
3265 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3267
3268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3270 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3271 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3272
3273 IEM_MC_PREPARE_SSE_USAGE();
3274 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3275 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3276
3277 IEM_MC_ADVANCE_RIP();
3278 IEM_MC_END();
3279 }
3280 return VINF_SUCCESS;
3281}
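
/*
 * The matching "HighHigh" sketch (punpckhbw at MMX width; hypothetical
 * helper, byte arrays indexed low to high):
 *
 *      static void punpckhbwSketch(uint8_t aDst[8], uint8_t const aSrc[8])
 *      {
 *          for (int i = 0; i < 8; i++)     // ascending, so in-place is safe
 *              aDst[i] = (i & 1) ? aSrc[4 + i / 2] : aDst[4 + i / 2];
 *      }
 *
 * Only the upper halves of both operands are consumed, which is why the
 * memory form may legitimately read just the upper 64 bits (see the fetch
 * comment above).
 */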
3282
3283
3284/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3285FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3286{
3287 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3288 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3289}
3290
3291/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3292FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3293{
3294 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3295 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3296}
3297/* Opcode 0xf3 0x0f 0x68 - invalid */
3298
3299
3300/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3301FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3302{
3303 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3304 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3305}
3306
3307/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3308FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3309{
3310 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3311 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3313}
3314/* Opcode 0xf3 0x0f 0x69 - invalid */
3315
3316
3317/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3318FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3319{
3320 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3321 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3322}
3323
3324/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3325FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3326{
3327 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3328 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3329}
3330/* Opcode 0xf3 0x0f 0x6a - invalid */
3331
3332
3333/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3334FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3335/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3336FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3337/* Opcode 0xf3 0x0f 0x6b - invalid */
3338
3339
3340/* Opcode 0x0f 0x6c - invalid */
3341
3342/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3343FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3344{
3345 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3346 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3347}
3348
3349/* Opcode 0xf3 0x0f 0x6c - invalid */
3350/* Opcode 0xf2 0x0f 0x6c - invalid */
3351
3352
3353/* Opcode 0x0f 0x6d - invalid */
3354
3355/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3356FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3357{
3358 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3359 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3360}
3361
3362/* Opcode 0xf3 0x0f 0x6d - invalid */
3363
3364
3365FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3366{
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3369 {
3370 /**
3371 * @opcode 0x6e
3372 * @opcodesub rex.w=1
3373 * @oppfx none
3374 * @opcpuid mmx
3375 * @opgroup og_mmx_datamove
3376 * @opxcpttype 5
3377 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3378 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3379 */
3380 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3382 {
3383 /* MMX, greg64 */
3384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3385 IEM_MC_BEGIN(0, 1);
3386 IEM_MC_LOCAL(uint64_t, u64Tmp);
3387
3388 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3389 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3390
3391 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3392 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3393 IEM_MC_FPU_TO_MMX_MODE();
3394
3395 IEM_MC_ADVANCE_RIP();
3396 IEM_MC_END();
3397 }
3398 else
3399 {
3400 /* MMX, [mem64] */
3401 IEM_MC_BEGIN(0, 2);
3402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3403 IEM_MC_LOCAL(uint64_t, u64Tmp);
3404
3405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3407 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3408 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3409
3410 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3411 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3412 IEM_MC_FPU_TO_MMX_MODE();
3413
3414 IEM_MC_ADVANCE_RIP();
3415 IEM_MC_END();
3416 }
3417 }
3418 else
3419 {
3420 /**
3421 * @opdone
3422 * @opcode 0x6e
3423 * @opcodesub rex.w=0
3424 * @oppfx none
3425 * @opcpuid mmx
3426 * @opgroup og_mmx_datamove
3427 * @opxcpttype 5
3428 * @opfunction iemOp_movd_q_Pd_Ey
3429 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3430 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3431 */
3432 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3434 {
3435 /* MMX, greg */
3436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3437 IEM_MC_BEGIN(0, 1);
3438 IEM_MC_LOCAL(uint64_t, u64Tmp);
3439
3440 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3441 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3442
3443 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3444 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3445 IEM_MC_FPU_TO_MMX_MODE();
3446
3447 IEM_MC_ADVANCE_RIP();
3448 IEM_MC_END();
3449 }
3450 else
3451 {
3452 /* MMX, [mem] */
3453 IEM_MC_BEGIN(0, 2);
3454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3455 IEM_MC_LOCAL(uint32_t, u32Tmp);
3456
3457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3459 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3460 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3461
3462 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3463 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3464 IEM_MC_FPU_TO_MMX_MODE();
3465
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 }
3469 }
3470 return VINF_SUCCESS;
3471}
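
/*
 * Destination-width sketch for the two MMX forms decoded above (hypothetical
 * plain-C model of the MMX destination register):
 *
 *      // rex.w=1: movq mm, r/m64 - full 64-bit copy
 *      //      u64Mm = u64Src;
 *      // rex.w=0: movd mm, r/m32 - 32-bit source, zero extended
 *      //      u64Mm = (uint64_t)u32Src;
 *
 * The IEM_MC_FPU_TO_MMX_MODE() calls also set FTW to 0xff (all tags valid),
 * which is what the ftw=0xff assertions in the @optest lines verify.
 */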
3472
3473FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3474{
3475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3476 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3477 {
3478 /**
3479 * @opcode 0x6e
3480 * @opcodesub rex.w=1
3481 * @oppfx 0x66
3482 * @opcpuid sse2
3483 * @opgroup og_sse2_simdint_datamove
3484 * @opxcpttype 5
3485 * @optest 64-bit / op1=1 op2=2 -> op1=2
3486 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3487 */
3488 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3490 {
3491 /* XMM, greg64 */
3492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3493 IEM_MC_BEGIN(0, 1);
3494 IEM_MC_LOCAL(uint64_t, u64Tmp);
3495
3496 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3498
3499 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3500 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /* XMM, [mem64] */
3508 IEM_MC_BEGIN(0, 2);
3509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3510 IEM_MC_LOCAL(uint64_t, u64Tmp);
3511
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3514 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3516
3517 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3518 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3519
3520 IEM_MC_ADVANCE_RIP();
3521 IEM_MC_END();
3522 }
3523 }
3524 else
3525 {
3526 /**
3527 * @opdone
3528 * @opcode 0x6e
3529 * @opcodesub rex.w=0
3530 * @oppfx 0x66
3531 * @opcpuid sse2
3532 * @opgroup og_sse2_simdint_datamove
3533 * @opxcpttype 5
3534 * @opfunction iemOp_movd_q_Vy_Ey
3535 * @optest op1=1 op2=2 -> op1=2
3536 * @optest op1=0 op2=-42 -> op1=-42
3537 */
3538 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3540 {
3541 /* XMM, greg32 */
3542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3543 IEM_MC_BEGIN(0, 1);
3544 IEM_MC_LOCAL(uint32_t, u32Tmp);
3545
3546 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3547 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3548
3549 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3550 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3551
3552 IEM_MC_ADVANCE_RIP();
3553 IEM_MC_END();
3554 }
3555 else
3556 {
3557 /* XMM, [mem32] */
3558 IEM_MC_BEGIN(0, 2);
3559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3560 IEM_MC_LOCAL(uint32_t, u32Tmp);
3561
3562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3564 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3565 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3566
3567 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3568 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3569
3570 IEM_MC_ADVANCE_RIP();
3571 IEM_MC_END();
3572 }
3573 }
3574 return VINF_SUCCESS;
3575}
3576
3577/* Opcode 0xf3 0x0f 0x6e - invalid */
3578
3579
3580/**
3581 * @opcode 0x6f
3582 * @oppfx none
3583 * @opcpuid mmx
3584 * @opgroup og_mmx_datamove
3585 * @opxcpttype 5
3586 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3587 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3588 */
3589FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3590{
3591 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3594 {
3595 /*
3596 * Register, register.
3597 */
3598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3599 IEM_MC_BEGIN(0, 1);
3600 IEM_MC_LOCAL(uint64_t, u64Tmp);
3601
3602 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3603 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3604
3605 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3606 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3607 IEM_MC_FPU_TO_MMX_MODE();
3608
3609 IEM_MC_ADVANCE_RIP();
3610 IEM_MC_END();
3611 }
3612 else
3613 {
3614 /*
3615 * Register, memory.
3616 */
3617 IEM_MC_BEGIN(0, 2);
3618 IEM_MC_LOCAL(uint64_t, u64Tmp);
3619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3620
3621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3623 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3624 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3625
3626 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3627 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3628 IEM_MC_FPU_TO_MMX_MODE();
3629
3630 IEM_MC_ADVANCE_RIP();
3631 IEM_MC_END();
3632 }
3633 return VINF_SUCCESS;
3634}
3635
3636/**
3637 * @opcode 0x6f
3638 * @oppfx 0x66
3639 * @opcpuid sse2
3640 * @opgroup og_sse2_simdint_datamove
3641 * @opxcpttype 1
3642 * @optest op1=1 op2=2 -> op1=2
3643 * @optest op1=0 op2=-42 -> op1=-42
3644 */
3645FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3646{
3647 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3649 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3650 {
3651 /*
3652 * Register, register.
3653 */
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_BEGIN(0, 0);
3656
3657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3659
3660 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3661 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3662 IEM_MC_ADVANCE_RIP();
3663 IEM_MC_END();
3664 }
3665 else
3666 {
3667 /*
3668 * Register, memory.
3669 */
3670 IEM_MC_BEGIN(0, 2);
3671 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3673
3674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3678
3679 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3680 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3681
3682 IEM_MC_ADVANCE_RIP();
3683 IEM_MC_END();
3684 }
3685 return VINF_SUCCESS;
3686}
3687
3688/**
3689 * @opcode 0x6f
3690 * @oppfx 0xf3
3691 * @opcpuid sse2
3692 * @opgroup og_sse2_simdint_datamove
3693 * @opxcpttype 4UA
3694 * @optest op1=1 op2=2 -> op1=2
3695 * @optest op1=0 op2=-42 -> op1=-42
3696 */
3697FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3698{
3699 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3701 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3702 {
3703 /*
3704 * Register, register.
3705 */
3706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3707 IEM_MC_BEGIN(0, 0);
3708 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3709 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3710 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3711 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3712 IEM_MC_ADVANCE_RIP();
3713 IEM_MC_END();
3714 }
3715 else
3716 {
3717 /*
3718 * Register, memory.
3719 */
3720 IEM_MC_BEGIN(0, 2);
3721 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3723
3724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3726 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3727 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3728 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3729 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3730
3731 IEM_MC_ADVANCE_RIP();
3732 IEM_MC_END();
3733 }
3734 return VINF_SUCCESS;
3735}
3736
3737
3738/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3739FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3740{
3741 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3743 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3744 {
3745 /*
3746 * Register, register.
3747 */
3748 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3750
3751 IEM_MC_BEGIN(3, 0);
3752 IEM_MC_ARG(uint64_t *, pDst, 0);
3753 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3754 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3755 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3756 IEM_MC_PREPARE_FPU_USAGE();
3757 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3758 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3759 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3760 IEM_MC_ADVANCE_RIP();
3761 IEM_MC_END();
3762 }
3763 else
3764 {
3765 /*
3766 * Register, memory.
3767 */
3768 IEM_MC_BEGIN(3, 2);
3769 IEM_MC_ARG(uint64_t *, pDst, 0);
3770 IEM_MC_LOCAL(uint64_t, uSrc);
3771 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3773
3774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3775 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3778 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3779
3780 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3781 IEM_MC_PREPARE_FPU_USAGE();
3782 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3783 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3784
3785 IEM_MC_ADVANCE_RIP();
3786 IEM_MC_END();
3787 }
3788 return VINF_SUCCESS;
3789}
3790
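/*
 * Immediate-operand reference for the pshuf* family (standard shuffle
 * semantics): each 2-bit field of bEvil selects the source element for the
 * corresponding destination element, i.e. Dst[i] = Src[(bEvil >> (i * 2)) & 3].
 * E.g. pshufw mm0, mm1, 0x1B (binary 00 01 10 11) reverses the four words.
 * pshufd below applies the same selection to dwords, while pshufhw/pshuflw
 * shuffle only the high/low four words and copy the other quadword unchanged.
 */
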
3791/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3792FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3793{
3794 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3796 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3797 {
3798 /*
3799 * Register, register.
3800 */
3801 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3803
3804 IEM_MC_BEGIN(3, 0);
3805 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3806 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3807 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3808 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3809 IEM_MC_PREPARE_SSE_USAGE();
3810 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3811 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3812 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3813 IEM_MC_ADVANCE_RIP();
3814 IEM_MC_END();
3815 }
3816 else
3817 {
3818 /*
3819 * Register, memory.
3820 */
3821 IEM_MC_BEGIN(3, 2);
3822 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3823 IEM_MC_LOCAL(RTUINT128U, uSrc);
3824 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3826
3827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3828 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3831 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3832
3833 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3834 IEM_MC_PREPARE_SSE_USAGE();
3835 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3836 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3837
3838 IEM_MC_ADVANCE_RIP();
3839 IEM_MC_END();
3840 }
3841 return VINF_SUCCESS;
3842}
3843
3844/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3845FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3846{
3847 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3850 {
3851 /*
3852 * Register, register.
3853 */
3854 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856
3857 IEM_MC_BEGIN(3, 0);
3858 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3859 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3860 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3861 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3862 IEM_MC_PREPARE_SSE_USAGE();
3863 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3864 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3865 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3866 IEM_MC_ADVANCE_RIP();
3867 IEM_MC_END();
3868 }
3869 else
3870 {
3871 /*
3872 * Register, memory.
3873 */
3874 IEM_MC_BEGIN(3, 2);
3875 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3876 IEM_MC_LOCAL(RTUINT128U, uSrc);
3877 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3879
3880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3881 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3885
3886 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3887 IEM_MC_PREPARE_SSE_USAGE();
3888 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3889 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3890
3891 IEM_MC_ADVANCE_RIP();
3892 IEM_MC_END();
3893 }
3894 return VINF_SUCCESS;
3895}
3896
3897/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3898FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3899{
3900 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3903 {
3904 /*
3905 * Register, register.
3906 */
3907 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3909
3910 IEM_MC_BEGIN(3, 0);
3911 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3912 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3913 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3915 IEM_MC_PREPARE_SSE_USAGE();
3916 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3917 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3918 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3919 IEM_MC_ADVANCE_RIP();
3920 IEM_MC_END();
3921 }
3922 else
3923 {
3924 /*
3925 * Register, memory.
3926 */
3927 IEM_MC_BEGIN(3, 2);
3928 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3929 IEM_MC_LOCAL(RTUINT128U, uSrc);
3930 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3932
3933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3934 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3935 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3937 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3938
3939 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3940 IEM_MC_PREPARE_SSE_USAGE();
3941 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3942 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3943
3944 IEM_MC_ADVANCE_RIP();
3945 IEM_MC_END();
3946 }
3947 return VINF_SUCCESS;
3948}
3949
3950
3951/** Opcode 0x0f 0x71 11/2. */
3952FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3953
3954/** Opcode 0x66 0x0f 0x71 11/2. */
3955FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3956
3957/** Opcode 0x0f 0x71 11/4. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3959
3960/** Opcode 0x66 0x0f 0x71 11/4. */
3961FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x0f 0x71 11/6. */
3964FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x66 0x0f 0x71 11/6. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3968
3969
3970/**
3971 * Group 12 jump table for register variant.
3972 */
3973IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3974{
3975 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3976 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3977 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3978 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3979 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3980 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3981 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3982 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3983};
3984AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3985
3986
3987/** Opcode 0x0f 0x71. */
3988FNIEMOP_DEF(iemOp_Grp12)
3989{
3990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3992 /* register, register */
3993 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3994 + pVCpu->iem.s.idxPrefix], bRm);
3995 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3996}
3997
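/*
 * Note on the dispatch above: the register-variant tables are indexed by
 * reg * 4 + pVCpu->iem.s.idxPrefix, the four columns being no prefix,
 * 0x66, 0xf3 and 0xf2 (matching the opcode stub comments).  E.g.
 * 66 0f 71 /2 ib (psrlw xmm, imm8) selects entry 2*4 + 1, i.e.
 * iemOp_Grp12_psrlw_Ux_Ib.  Groups 13 and 14 below dispatch identically.
 */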
3998
3999/** Opcode 0x0f 0x72 11/2. */
4000FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4001
4002/** Opcode 0x66 0x0f 0x72 11/2. */
4003FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4004
4005/** Opcode 0x0f 0x72 11/4. */
4006FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4007
4008/** Opcode 0x66 0x0f 0x72 11/4. */
4009FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x0f 0x72 11/6. */
4012FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x66 0x0f 0x72 11/6. */
4015FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4016
4017
4018/**
4019 * Group 13 jump table for register variant.
4020 */
4021IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4022{
4023 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4024 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4025 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4026 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4027 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4028 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4029 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4030 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4031};
4032AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4033
4034/** Opcode 0x0f 0x72. */
4035FNIEMOP_DEF(iemOp_Grp13)
4036{
4037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4039 /* register, register */
4040 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4041 + pVCpu->iem.s.idxPrefix], bRm);
4042 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4043}
4044
4045
4046/** Opcode 0x0f 0x73 11/2. */
4047FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4048
4049/** Opcode 0x66 0x0f 0x73 11/2. */
4050FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4051
4052/** Opcode 0x66 0x0f 0x73 11/3. */
4053FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4054
4055/** Opcode 0x0f 0x73 11/6. */
4056FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4057
4058/** Opcode 0x66 0x0f 0x73 11/6. */
4059FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4060
4061/** Opcode 0x66 0x0f 0x73 11/7. */
4062FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4063
4064/**
4065 * Group 14 jump table for register variant.
4066 */
4067IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4068{
4069 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4070 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4071 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4072 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4073 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4074 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4075 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4076 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4077};
4078AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4079
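/*
 * Unlike groups 12 and 13, rows /3 (psrldq) and /7 (pslldq) above have only
 * the 0x66 column populated: the xmm byte-shift forms are SSE2-only and have
 * no MMX (no-prefix) counterpart.
 */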
4080
4081/** Opcode 0x0f 0x73. */
4082FNIEMOP_DEF(iemOp_Grp14)
4083{
4084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4085 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4086 /* register, register */
4087 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4088 + pVCpu->iem.s.idxPrefix], bRm);
4089 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4090}
4091
4092
4093/**
4094 * Common worker for MMX instructions of the form:
4095 * pxxx mm1, mm2/mem64
4096 */
4097FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4098{
4099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /*
4103 * Register, register.
4104 */
4105 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4106 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4108 IEM_MC_BEGIN(2, 0);
4109 IEM_MC_ARG(uint64_t *, pDst, 0);
4110 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4111 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4112 IEM_MC_PREPARE_FPU_USAGE();
4113 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4114 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4115 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4116 IEM_MC_ADVANCE_RIP();
4117 IEM_MC_END();
4118 }
4119 else
4120 {
4121 /*
4122 * Register, memory.
4123 */
4124 IEM_MC_BEGIN(2, 2);
4125 IEM_MC_ARG(uint64_t *, pDst, 0);
4126 IEM_MC_LOCAL(uint64_t, uSrc);
4127 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4129
4130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4132 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4133 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4134
4135 IEM_MC_PREPARE_FPU_USAGE();
4136 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4137 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4138
4139 IEM_MC_ADVANCE_RIP();
4140 IEM_MC_END();
4141 }
4142 return VINF_SUCCESS;
4143}
4144
4145
4146/**
4147 * Common worker for SSE2 instructions on the forms:
4148 * pxxx xmm1, xmm2/mem128
4149 *
4150 * Proper alignment of the 128-bit operand is enforced.
4151 * Exceptions type 4. SSE2 cpuid checks.
4152 */
4153FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4154{
4155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4156 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4157 {
4158 /*
4159 * Register, register.
4160 */
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_BEGIN(2, 0);
4163 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4164 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4165 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4166 IEM_MC_PREPARE_SSE_USAGE();
4167 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4168 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4169 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4170 IEM_MC_ADVANCE_RIP();
4171 IEM_MC_END();
4172 }
4173 else
4174 {
4175 /*
4176 * Register, memory.
4177 */
4178 IEM_MC_BEGIN(2, 2);
4179 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4180 IEM_MC_LOCAL(RTUINT128U, uSrc);
4181 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4183
4184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4186 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4187 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4188
4189 IEM_MC_PREPARE_SSE_USAGE();
4190 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4191 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4192
4193 IEM_MC_ADVANCE_RIP();
4194 IEM_MC_END();
4195 }
4196 return VINF_SUCCESS;
4197}
4198
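/*
 * Both common workers take the same PCIEMOPMEDIAF2 bundle: the MMX variant
 * calls its pfnU64 member and the SSE2 variant its pfnU128 member, so a
 * single g_iemAImpl_xxx entry serves both encodings of the pcmpeq*
 * instructions below.
 */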
4199
4200/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4201FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4202{
4203 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4204 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4205}
4206
4207/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4208FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4209{
4210 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4211 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4212}
4213
4214/* Opcode 0xf3 0x0f 0x74 - invalid */
4215/* Opcode 0xf2 0x0f 0x74 - invalid */
4216
4217
4218/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4219FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4220{
4221 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4222 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4223}
4224
4225/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4226FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4227{
4228 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4229 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4230}
4231
4232/* Opcode 0xf3 0x0f 0x75 - invalid */
4233/* Opcode 0xf2 0x0f 0x75 - invalid */
4234
4235
4236/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4237FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4238{
4239 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4240 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4241}
4242
4243/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4244FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4245{
4246 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4247 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4248}
4249
4250/* Opcode 0xf3 0x0f 0x76 - invalid */
4251/* Opcode 0xf2 0x0f 0x76 - invalid */
4252
4253
4254/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4255FNIEMOP_DEF(iemOp_emms)
4256{
4257 IEMOP_MNEMONIC(emms, "emms");
4258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4259
4260 IEM_MC_BEGIN(0,0);
4261 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4262 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4263 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4264 IEM_MC_FPU_FROM_MMX_MODE();
4265 IEM_MC_ADVANCE_RIP();
4266 IEM_MC_END();
4267 return VINF_SUCCESS;
4268}
4269
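/*
 * emms undoes the IEM_MC_FPU_TO_MMX_MODE transitions performed by the MMX
 * instructions above, marking the x87 register stack empty again (the
 * ftw=0xff expectations in the @optest annotations show the all-valid tag
 * state that MMX mode establishes).
 */
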
4270/* Opcode 0x66 0x0f 0x77 - invalid */
4271/* Opcode 0xf3 0x0f 0x77 - invalid */
4272/* Opcode 0xf2 0x0f 0x77 - invalid */
4273
4274/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4275FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4276/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4277FNIEMOP_STUB(iemOp_AmdGrp17);
4278/* Opcode 0xf3 0x0f 0x78 - invalid */
4279/* Opcode 0xf2 0x0f 0x78 - invalid */
4280
4281/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4282FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4283/* Opcode 0x66 0x0f 0x79 - invalid */
4284/* Opcode 0xf3 0x0f 0x79 - invalid */
4285/* Opcode 0xf2 0x0f 0x79 - invalid */
4286
4287/* Opcode 0x0f 0x7a - invalid */
4288/* Opcode 0x66 0x0f 0x7a - invalid */
4289/* Opcode 0xf3 0x0f 0x7a - invalid */
4290/* Opcode 0xf2 0x0f 0x7a - invalid */
4291
4292/* Opcode 0x0f 0x7b - invalid */
4293/* Opcode 0x66 0x0f 0x7b - invalid */
4294/* Opcode 0xf3 0x0f 0x7b - invalid */
4295/* Opcode 0xf2 0x0f 0x7b - invalid */
4296
4297/* Opcode 0x0f 0x7c - invalid */
4298/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4299FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4300/* Opcode 0xf3 0x0f 0x7c - invalid */
4301/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4302FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4303
4304/* Opcode 0x0f 0x7d - invalid */
4305/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4306FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4307/* Opcode 0xf3 0x0f 0x7d - invalid */
4308/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4309FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4310
4311
4312/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4313FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4314{
4315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4316 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4317 {
4318 /**
4319 * @opcode 0x7e
4320 * @opcodesub rex.w=1
4321 * @oppfx none
4322 * @opcpuid mmx
4323 * @opgroup og_mmx_datamove
4324 * @opxcpttype 5
4325 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4326 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4327 */
4328 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4329 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4330 {
4331 /* greg64, MMX */
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333 IEM_MC_BEGIN(0, 1);
4334 IEM_MC_LOCAL(uint64_t, u64Tmp);
4335
4336 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4337 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4338
4339 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4340 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4341 IEM_MC_FPU_TO_MMX_MODE();
4342
4343 IEM_MC_ADVANCE_RIP();
4344 IEM_MC_END();
4345 }
4346 else
4347 {
4348 /* [mem64], MMX */
4349 IEM_MC_BEGIN(0, 2);
4350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4351 IEM_MC_LOCAL(uint64_t, u64Tmp);
4352
4353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4355 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4356 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4357
4358 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4359 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4360 IEM_MC_FPU_TO_MMX_MODE();
4361
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 }
4366 else
4367 {
4368 /**
4369 * @opdone
4370 * @opcode 0x7e
4371 * @opcodesub rex.w=0
4372 * @oppfx none
4373 * @opcpuid mmx
4374 * @opgroup og_mmx_datamove
4375 * @opxcpttype 5
4376 * @opfunction iemOp_movd_q_Ey_Pd
4377 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4378 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4379 */
4380 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4382 {
4383 /* greg32, MMX */
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4385 IEM_MC_BEGIN(0, 1);
4386 IEM_MC_LOCAL(uint32_t, u32Tmp);
4387
4388 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4389 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4390
4391 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4392 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4393 IEM_MC_FPU_TO_MMX_MODE();
4394
4395 IEM_MC_ADVANCE_RIP();
4396 IEM_MC_END();
4397 }
4398 else
4399 {
4400 /* [mem32], MMX */
4401 IEM_MC_BEGIN(0, 2);
4402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4403 IEM_MC_LOCAL(uint32_t, u32Tmp);
4404
4405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4408 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4409
4410 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4411 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4412 IEM_MC_FPU_TO_MMX_MODE();
4413
4414 IEM_MC_ADVANCE_RIP();
4415 IEM_MC_END();
4416 }
4417 }
4418 return VINF_SUCCESS;
4420}
4421
4422
4423FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4424{
4425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4426 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4427 {
4428 /**
4429 * @opcode 0x7e
4430 * @opcodesub rex.w=1
4431 * @oppfx 0x66
4432 * @opcpuid sse2
4433 * @opgroup og_sse2_simdint_datamove
4434 * @opxcpttype 5
4435 * @optest 64-bit / op1=1 op2=2 -> op1=2
4436 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4437 */
4438 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4440 {
4441 /* greg64, XMM */
4442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4443 IEM_MC_BEGIN(0, 1);
4444 IEM_MC_LOCAL(uint64_t, u64Tmp);
4445
4446 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4448
4449 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4450 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4451
4452 IEM_MC_ADVANCE_RIP();
4453 IEM_MC_END();
4454 }
4455 else
4456 {
4457 /* [mem64], XMM */
4458 IEM_MC_BEGIN(0, 2);
4459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4460 IEM_MC_LOCAL(uint64_t, u64Tmp);
4461
4462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4466
4467 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4468 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4469
4470 IEM_MC_ADVANCE_RIP();
4471 IEM_MC_END();
4472 }
4473 }
4474 else
4475 {
4476 /**
4477 * @opdone
4478 * @opcode 0x7e
4479 * @opcodesub rex.w=0
4480 * @oppfx 0x66
4481 * @opcpuid sse2
4482 * @opgroup og_sse2_simdint_datamove
4483 * @opxcpttype 5
4484 * @opfunction iemOp_movd_q_Ey_Vy
4485 * @optest op1=1 op2=2 -> op1=2
4486 * @optest op1=0 op2=-42 -> op1=-42
4487 */
4488 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4490 {
4491 /* greg32, XMM */
4492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4493 IEM_MC_BEGIN(0, 1);
4494 IEM_MC_LOCAL(uint32_t, u32Tmp);
4495
4496 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4497 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4498
4499 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4500 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4501
4502 IEM_MC_ADVANCE_RIP();
4503 IEM_MC_END();
4504 }
4505 else
4506 {
4507 /* [mem32], XMM */
4508 IEM_MC_BEGIN(0, 2);
4509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4510 IEM_MC_LOCAL(uint32_t, u32Tmp);
4511
4512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4514 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4516
4517 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4518 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4519
4520 IEM_MC_ADVANCE_RIP();
4521 IEM_MC_END();
4522 }
4523 }
4524 return VINF_SUCCESS;
4526}
4527
4528/**
4529 * @opcode 0x7e
4530 * @oppfx 0xf3
4531 * @opcpuid sse2
4532 * @opgroup og_sse2_pcksclr_datamove
4533 * @opxcpttype 5
4534 * @optest op1=1 op2=2 -> op1=2
4535 * @optest op1=0 op2=-42 -> op1=-42
4536 */
4537FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4538{
4539 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4542 {
4543 /*
4544 * Register, register.
4545 */
4546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4547 IEM_MC_BEGIN(0, 2);
4548 IEM_MC_LOCAL(uint64_t, uSrc);
4549
4550 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4552
4553 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4554 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4555
4556 IEM_MC_ADVANCE_RIP();
4557 IEM_MC_END();
4558 }
4559 else
4560 {
4561 /*
4562 * Register, memory.
4563 */
4564 IEM_MC_BEGIN(0, 2);
4565 IEM_MC_LOCAL(uint64_t, uSrc);
4566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4567
4568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4572
4573 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4574 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4575
4576 IEM_MC_ADVANCE_RIP();
4577 IEM_MC_END();
4578 }
4579 return VINF_SUCCESS;
4580}
4581
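/*
 * Note the VqZx_WO operand form above: both paths store through
 * IEM_MC_STORE_XREG_U64_ZX_U128, so the upper quadword of the destination
 * register is zeroed as documented for movq xmm, xmm/m64.
 */
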
4582/* Opcode 0xf2 0x0f 0x7e - invalid */
4583
4584
4585/** Opcode 0x0f 0x7f - movq Qq, Pq */
4586FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4587{
4588 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4591 {
4592 /*
4593 * Register, register.
4594 */
4595 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4596 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4598 IEM_MC_BEGIN(0, 1);
4599 IEM_MC_LOCAL(uint64_t, u64Tmp);
4600 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4601 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4602 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4603 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
 IEM_MC_FPU_TO_MMX_MODE(); /* movq Qq,Pq is an MMX instruction and enters MMX mode, like iemOp_movq_Pq_Qq above. */
4604 IEM_MC_ADVANCE_RIP();
4605 IEM_MC_END();
4606 }
4607 else
4608 {
4609 /*
4610 * Memory, register.
4611 */
4612 IEM_MC_BEGIN(0, 2);
4613 IEM_MC_LOCAL(uint64_t, u64Tmp);
4614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4615
4616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4618 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4619 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4620
4621 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4622 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
 IEM_MC_FPU_TO_MMX_MODE(); /* The store form also enters MMX mode, matching the movd stores in iemOp_movd_q_Ey_Pd. */
4623
4624 IEM_MC_ADVANCE_RIP();
4625 IEM_MC_END();
4626 }
4627 return VINF_SUCCESS;
4628}
4629
4630/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4631FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4632{
4633 IEMOP_MNEMONIC(movdqa_Wx_Vx, "movdqa Wx,Vx");
4634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4636 {
4637 /*
4638 * Register, register.
4639 */
4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4641 IEM_MC_BEGIN(0, 0);
4642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4643 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4644 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4645 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4646 IEM_MC_ADVANCE_RIP();
4647 IEM_MC_END();
4648 }
4649 else
4650 {
4651 /*
4652 * Memory, register.
4653 */
4654 IEM_MC_BEGIN(0, 2);
4655 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4657
4658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4662
4663 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4664 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4665
4666 IEM_MC_ADVANCE_RIP();
4667 IEM_MC_END();
4668 }
4669 return VINF_SUCCESS;
4670}
4671
4672/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4673FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4674{
4675 IEMOP_MNEMONIC(movdqu_Wx_Vx, "movdqu Wx,Vx");
4676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4678 {
4679 /*
4680 * Register, register.
4681 */
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4683 IEM_MC_BEGIN(0, 0);
4684 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4685 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4686 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4687 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 }
4691 else
4692 {
4693 /*
4694 * Memory, register.
4695 */
4696 IEM_MC_BEGIN(0, 2);
4697 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4699
4700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4702 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4703 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4704
4705 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4706 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4707
4708 IEM_MC_ADVANCE_RIP();
4709 IEM_MC_END();
4710 }
4711 return VINF_SUCCESS;
4712}
4713
4714/* Opcode 0xf2 0x0f 0x7f - invalid */
4715
4716
4717
4718/** Opcode 0x0f 0x80. */
4719FNIEMOP_DEF(iemOp_jo_Jv)
4720{
4721 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4722 IEMOP_HLP_MIN_386();
4723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4724 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4725 {
4726 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728
4729 IEM_MC_BEGIN(0, 0);
4730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4731 IEM_MC_REL_JMP_S16(i16Imm);
4732 } IEM_MC_ELSE() {
4733 IEM_MC_ADVANCE_RIP();
4734 } IEM_MC_ENDIF();
4735 IEM_MC_END();
4736 }
4737 else
4738 {
4739 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4741
4742 IEM_MC_BEGIN(0, 0);
4743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4744 IEM_MC_REL_JMP_S32(i32Imm);
4745 } IEM_MC_ELSE() {
4746 IEM_MC_ADVANCE_RIP();
4747 } IEM_MC_ENDIF();
4748 IEM_MC_END();
4749 }
4750 return VINF_SUCCESS;
4751}
4752
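/*
 * The Jcc forms at 0x0f 0x80..0x8f all follow the pattern above: a rel16 or
 * rel32 immediate selected by the effective operand size (64-bit mode takes
 * the rel32 path via IEMOP_HLP_DEFAULT_64BIT_OP_SIZE), with only the EFLAGS
 * test varying: OF, CF, ZF, CF|ZF, SF, PF, SF!=OF and ZF|(SF!=OF), each as
 * a set/clear pair.
 */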
4753
4754/** Opcode 0x0f 0x81. */
4755FNIEMOP_DEF(iemOp_jno_Jv)
4756{
4757 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4758 IEMOP_HLP_MIN_386();
4759 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4760 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4761 {
4762 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4764
4765 IEM_MC_BEGIN(0, 0);
4766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4767 IEM_MC_ADVANCE_RIP();
4768 } IEM_MC_ELSE() {
4769 IEM_MC_REL_JMP_S16(i16Imm);
4770 } IEM_MC_ENDIF();
4771 IEM_MC_END();
4772 }
4773 else
4774 {
4775 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4777
4778 IEM_MC_BEGIN(0, 0);
4779 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4780 IEM_MC_ADVANCE_RIP();
4781 } IEM_MC_ELSE() {
4782 IEM_MC_REL_JMP_S32(i32Imm);
4783 } IEM_MC_ENDIF();
4784 IEM_MC_END();
4785 }
4786 return VINF_SUCCESS;
4787}
4788
4789
4790/** Opcode 0x0f 0x82. */
4791FNIEMOP_DEF(iemOp_jc_Jv)
4792{
4793 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4794 IEMOP_HLP_MIN_386();
4795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4796 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4797 {
4798 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4800
4801 IEM_MC_BEGIN(0, 0);
4802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4803 IEM_MC_REL_JMP_S16(i16Imm);
4804 } IEM_MC_ELSE() {
4805 IEM_MC_ADVANCE_RIP();
4806 } IEM_MC_ENDIF();
4807 IEM_MC_END();
4808 }
4809 else
4810 {
4811 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4813
4814 IEM_MC_BEGIN(0, 0);
4815 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4816 IEM_MC_REL_JMP_S32(i32Imm);
4817 } IEM_MC_ELSE() {
4818 IEM_MC_ADVANCE_RIP();
4819 } IEM_MC_ENDIF();
4820 IEM_MC_END();
4821 }
4822 return VINF_SUCCESS;
4823}
4824
4825
4826/** Opcode 0x0f 0x83. */
4827FNIEMOP_DEF(iemOp_jnc_Jv)
4828{
4829 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4830 IEMOP_HLP_MIN_386();
4831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4832 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4833 {
4834 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4836
4837 IEM_MC_BEGIN(0, 0);
4838 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4839 IEM_MC_ADVANCE_RIP();
4840 } IEM_MC_ELSE() {
4841 IEM_MC_REL_JMP_S16(i16Imm);
4842 } IEM_MC_ENDIF();
4843 IEM_MC_END();
4844 }
4845 else
4846 {
4847 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849
4850 IEM_MC_BEGIN(0, 0);
4851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4852 IEM_MC_ADVANCE_RIP();
4853 } IEM_MC_ELSE() {
4854 IEM_MC_REL_JMP_S32(i32Imm);
4855 } IEM_MC_ENDIF();
4856 IEM_MC_END();
4857 }
4858 return VINF_SUCCESS;
4859}
4860
4861
4862/** Opcode 0x0f 0x84. */
4863FNIEMOP_DEF(iemOp_je_Jv)
4864{
4865 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4866 IEMOP_HLP_MIN_386();
4867 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4868 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4869 {
4870 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872
4873 IEM_MC_BEGIN(0, 0);
4874 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4875 IEM_MC_REL_JMP_S16(i16Imm);
4876 } IEM_MC_ELSE() {
4877 IEM_MC_ADVANCE_RIP();
4878 } IEM_MC_ENDIF();
4879 IEM_MC_END();
4880 }
4881 else
4882 {
4883 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885
4886 IEM_MC_BEGIN(0, 0);
4887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4888 IEM_MC_REL_JMP_S32(i32Imm);
4889 } IEM_MC_ELSE() {
4890 IEM_MC_ADVANCE_RIP();
4891 } IEM_MC_ENDIF();
4892 IEM_MC_END();
4893 }
4894 return VINF_SUCCESS;
4895}
4896
4897
4898/** Opcode 0x0f 0x85. */
4899FNIEMOP_DEF(iemOp_jne_Jv)
4900{
4901 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4902 IEMOP_HLP_MIN_386();
4903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4904 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4905 {
4906 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908
4909 IEM_MC_BEGIN(0, 0);
4910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4911 IEM_MC_ADVANCE_RIP();
4912 } IEM_MC_ELSE() {
4913 IEM_MC_REL_JMP_S16(i16Imm);
4914 } IEM_MC_ENDIF();
4915 IEM_MC_END();
4916 }
4917 else
4918 {
4919 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4921
4922 IEM_MC_BEGIN(0, 0);
4923 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4924 IEM_MC_ADVANCE_RIP();
4925 } IEM_MC_ELSE() {
4926 IEM_MC_REL_JMP_S32(i32Imm);
4927 } IEM_MC_ENDIF();
4928 IEM_MC_END();
4929 }
4930 return VINF_SUCCESS;
4931}
4932
4933
4934/** Opcode 0x0f 0x86. */
4935FNIEMOP_DEF(iemOp_jbe_Jv)
4936{
4937 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4938 IEMOP_HLP_MIN_386();
4939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4940 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4941 {
4942 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4944
4945 IEM_MC_BEGIN(0, 0);
4946 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4947 IEM_MC_REL_JMP_S16(i16Imm);
4948 } IEM_MC_ELSE() {
4949 IEM_MC_ADVANCE_RIP();
4950 } IEM_MC_ENDIF();
4951 IEM_MC_END();
4952 }
4953 else
4954 {
4955 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4957
4958 IEM_MC_BEGIN(0, 0);
4959 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4960 IEM_MC_REL_JMP_S32(i32Imm);
4961 } IEM_MC_ELSE() {
4962 IEM_MC_ADVANCE_RIP();
4963 } IEM_MC_ENDIF();
4964 IEM_MC_END();
4965 }
4966 return VINF_SUCCESS;
4967}
4968
4969
4970/** Opcode 0x0f 0x87. */
4971FNIEMOP_DEF(iemOp_jnbe_Jv)
4972{
4973 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4974 IEMOP_HLP_MIN_386();
4975 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4976 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4977 {
4978 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4980
4981 IEM_MC_BEGIN(0, 0);
4982 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4983 IEM_MC_ADVANCE_RIP();
4984 } IEM_MC_ELSE() {
4985 IEM_MC_REL_JMP_S16(i16Imm);
4986 } IEM_MC_ENDIF();
4987 IEM_MC_END();
4988 }
4989 else
4990 {
4991 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993
4994 IEM_MC_BEGIN(0, 0);
4995 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4996 IEM_MC_ADVANCE_RIP();
4997 } IEM_MC_ELSE() {
4998 IEM_MC_REL_JMP_S32(i32Imm);
4999 } IEM_MC_ENDIF();
5000 IEM_MC_END();
5001 }
5002 return VINF_SUCCESS;
5003}
5004
5005
5006/** Opcode 0x0f 0x88. */
5007FNIEMOP_DEF(iemOp_js_Jv)
5008{
5009 IEMOP_MNEMONIC(js_Jv, "js Jv");
5010 IEMOP_HLP_MIN_386();
5011 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5012 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5013 {
5014 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5016
5017 IEM_MC_BEGIN(0, 0);
5018 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5019 IEM_MC_REL_JMP_S16(i16Imm);
5020 } IEM_MC_ELSE() {
5021 IEM_MC_ADVANCE_RIP();
5022 } IEM_MC_ENDIF();
5023 IEM_MC_END();
5024 }
5025 else
5026 {
5027 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5029
5030 IEM_MC_BEGIN(0, 0);
5031 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5032 IEM_MC_REL_JMP_S32(i32Imm);
5033 } IEM_MC_ELSE() {
5034 IEM_MC_ADVANCE_RIP();
5035 } IEM_MC_ENDIF();
5036 IEM_MC_END();
5037 }
5038 return VINF_SUCCESS;
5039}
5040
5041
5042/** Opcode 0x0f 0x89. */
5043FNIEMOP_DEF(iemOp_jns_Jv)
5044{
5045 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5046 IEMOP_HLP_MIN_386();
5047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5048 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5049 {
5050 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5052
5053 IEM_MC_BEGIN(0, 0);
5054 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5055 IEM_MC_ADVANCE_RIP();
5056 } IEM_MC_ELSE() {
5057 IEM_MC_REL_JMP_S16(i16Imm);
5058 } IEM_MC_ENDIF();
5059 IEM_MC_END();
5060 }
5061 else
5062 {
5063 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065
5066 IEM_MC_BEGIN(0, 0);
5067 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5068 IEM_MC_ADVANCE_RIP();
5069 } IEM_MC_ELSE() {
5070 IEM_MC_REL_JMP_S32(i32Imm);
5071 } IEM_MC_ENDIF();
5072 IEM_MC_END();
5073 }
5074 return VINF_SUCCESS;
5075}
5076
5077
5078/** Opcode 0x0f 0x8a. */
5079FNIEMOP_DEF(iemOp_jp_Jv)
5080{
5081 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5082 IEMOP_HLP_MIN_386();
5083 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5084 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5085 {
5086 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088
5089 IEM_MC_BEGIN(0, 0);
5090 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5091 IEM_MC_REL_JMP_S16(i16Imm);
5092 } IEM_MC_ELSE() {
5093 IEM_MC_ADVANCE_RIP();
5094 } IEM_MC_ENDIF();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5101
5102 IEM_MC_BEGIN(0, 0);
5103 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5104 IEM_MC_REL_JMP_S32(i32Imm);
5105 } IEM_MC_ELSE() {
5106 IEM_MC_ADVANCE_RIP();
5107 } IEM_MC_ENDIF();
5108 IEM_MC_END();
5109 }
5110 return VINF_SUCCESS;
5111}
5112
5113
5114/** Opcode 0x0f 0x8b. */
5115FNIEMOP_DEF(iemOp_jnp_Jv)
5116{
5117 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5118 IEMOP_HLP_MIN_386();
5119 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5120 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5121 {
5122 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5124
5125 IEM_MC_BEGIN(0, 0);
5126 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5127 IEM_MC_ADVANCE_RIP();
5128 } IEM_MC_ELSE() {
5129 IEM_MC_REL_JMP_S16(i16Imm);
5130 } IEM_MC_ENDIF();
5131 IEM_MC_END();
5132 }
5133 else
5134 {
5135 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5137
5138 IEM_MC_BEGIN(0, 0);
5139 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5140 IEM_MC_ADVANCE_RIP();
5141 } IEM_MC_ELSE() {
5142 IEM_MC_REL_JMP_S32(i32Imm);
5143 } IEM_MC_ENDIF();
5144 IEM_MC_END();
5145 }
5146 return VINF_SUCCESS;
5147}
5148
5149
5150/** Opcode 0x0f 0x8c. */
5151FNIEMOP_DEF(iemOp_jl_Jv)
5152{
5153 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5154 IEMOP_HLP_MIN_386();
5155 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5156 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5157 {
5158 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5160
5161 IEM_MC_BEGIN(0, 0);
5162 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5163 IEM_MC_REL_JMP_S16(i16Imm);
5164 } IEM_MC_ELSE() {
5165 IEM_MC_ADVANCE_RIP();
5166 } IEM_MC_ENDIF();
5167 IEM_MC_END();
5168 }
5169 else
5170 {
5171 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173
5174 IEM_MC_BEGIN(0, 0);
5175 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5176 IEM_MC_REL_JMP_S32(i32Imm);
5177 } IEM_MC_ELSE() {
5178 IEM_MC_ADVANCE_RIP();
5179 } IEM_MC_ENDIF();
5180 IEM_MC_END();
5181 }
5182 return VINF_SUCCESS;
5183}
5184
5185
5186/** Opcode 0x0f 0x8d. */
5187FNIEMOP_DEF(iemOp_jnl_Jv)
5188{
5189 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5190 IEMOP_HLP_MIN_386();
5191 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5192 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5193 {
5194 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5196
5197 IEM_MC_BEGIN(0, 0);
5198 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5199 IEM_MC_ADVANCE_RIP();
5200 } IEM_MC_ELSE() {
5201 IEM_MC_REL_JMP_S16(i16Imm);
5202 } IEM_MC_ENDIF();
5203 IEM_MC_END();
5204 }
5205 else
5206 {
5207 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5209
5210 IEM_MC_BEGIN(0, 0);
5211 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5212 IEM_MC_ADVANCE_RIP();
5213 } IEM_MC_ELSE() {
5214 IEM_MC_REL_JMP_S32(i32Imm);
5215 } IEM_MC_ENDIF();
5216 IEM_MC_END();
5217 }
5218 return VINF_SUCCESS;
5219}
5220
5221
5222/** Opcode 0x0f 0x8e. */
5223FNIEMOP_DEF(iemOp_jle_Jv)
5224{
5225 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5226 IEMOP_HLP_MIN_386();
5227 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5228 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5229 {
5230 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5232
5233 IEM_MC_BEGIN(0, 0);
5234 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5235 IEM_MC_REL_JMP_S16(i16Imm);
5236 } IEM_MC_ELSE() {
5237 IEM_MC_ADVANCE_RIP();
5238 } IEM_MC_ENDIF();
5239 IEM_MC_END();
5240 }
5241 else
5242 {
5243 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245
5246 IEM_MC_BEGIN(0, 0);
5247 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5248 IEM_MC_REL_JMP_S32(i32Imm);
5249 } IEM_MC_ELSE() {
5250 IEM_MC_ADVANCE_RIP();
5251 } IEM_MC_ENDIF();
5252 IEM_MC_END();
5253 }
5254 return VINF_SUCCESS;
5255}
5256
5257
5258/** Opcode 0x0f 0x8f. */
5259FNIEMOP_DEF(iemOp_jnle_Jv)
5260{
5261 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5262 IEMOP_HLP_MIN_386();
5263 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5264 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5265 {
5266 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5268
5269 IEM_MC_BEGIN(0, 0);
5270 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5271 IEM_MC_ADVANCE_RIP();
5272 } IEM_MC_ELSE() {
5273 IEM_MC_REL_JMP_S16(i16Imm);
5274 } IEM_MC_ENDIF();
5275 IEM_MC_END();
5276 }
5277 else
5278 {
5279 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281
5282 IEM_MC_BEGIN(0, 0);
5283 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5284 IEM_MC_ADVANCE_RIP();
5285 } IEM_MC_ELSE() {
5286 IEM_MC_REL_JMP_S32(i32Imm);
5287 } IEM_MC_ENDIF();
5288 IEM_MC_END();
5289 }
5290 return VINF_SUCCESS;
5291}
5292
5293
5294/** Opcode 0x0f 0x90. */
5295FNIEMOP_DEF(iemOp_seto_Eb)
5296{
5297 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5298 IEMOP_HLP_MIN_386();
5299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5300
5301 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5302 * any way. AMD says it's "unused", whatever that means. We're
5303 * ignoring it for now. */
5304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5305 {
5306 /* register target */
5307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5308 IEM_MC_BEGIN(0, 0);
5309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5310 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5311 } IEM_MC_ELSE() {
5312 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5313 } IEM_MC_ENDIF();
5314 IEM_MC_ADVANCE_RIP();
5315 IEM_MC_END();
5316 }
5317 else
5318 {
5319 /* memory target */
5320 IEM_MC_BEGIN(0, 1);
5321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5324 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5325 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5326 } IEM_MC_ELSE() {
5327 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5328 } IEM_MC_ENDIF();
5329 IEM_MC_ADVANCE_RIP();
5330 IEM_MC_END();
5331 }
5332 return VINF_SUCCESS;
5333}
5334
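/*
 * The setcc forms at 0x0f 0x90..0x9f mirror the Jcc conditions above,
 * storing 1 or 0 to the byte-sized register or memory operand instead of
 * branching; the ModR/M reg field is not decoded here (see the encoding
 * @todo in each function).
 */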
5335
5336/** Opcode 0x0f 0x91. */
5337FNIEMOP_DEF(iemOp_setno_Eb)
5338{
5339 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5340 IEMOP_HLP_MIN_386();
5341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5342
5343 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5344 * any way. AMD says it's "unused", whatever that means. We're
5345 * ignoring it for now. */
5346 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5347 {
5348 /* register target */
5349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5350 IEM_MC_BEGIN(0, 0);
5351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5352 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5353 } IEM_MC_ELSE() {
5354 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5355 } IEM_MC_ENDIF();
5356 IEM_MC_ADVANCE_RIP();
5357 IEM_MC_END();
5358 }
5359 else
5360 {
5361 /* memory target */
5362 IEM_MC_BEGIN(0, 1);
5363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5366 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5367 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5368 } IEM_MC_ELSE() {
5369 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5370 } IEM_MC_ENDIF();
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 }
5374 return VINF_SUCCESS;
5375}
5376
5377
5378/** Opcode 0x0f 0x92. */
5379FNIEMOP_DEF(iemOp_setc_Eb)
5380{
5381 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5382 IEMOP_HLP_MIN_386();
5383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5384
5385 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5386 * any way. AMD says it's "unused", whatever that means. We're
5387 * ignoring it for now. */
5388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5389 {
5390 /* register target */
5391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5392 IEM_MC_BEGIN(0, 0);
5393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5394 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5395 } IEM_MC_ELSE() {
5396 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5397 } IEM_MC_ENDIF();
5398 IEM_MC_ADVANCE_RIP();
5399 IEM_MC_END();
5400 }
5401 else
5402 {
5403 /* memory target */
5404 IEM_MC_BEGIN(0, 1);
5405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5408 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5409 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5410 } IEM_MC_ELSE() {
5411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5412 } IEM_MC_ENDIF();
5413 IEM_MC_ADVANCE_RIP();
5414 IEM_MC_END();
5415 }
5416 return VINF_SUCCESS;
5417}
5418
5419
5420/** Opcode 0x0f 0x93. */
5421FNIEMOP_DEF(iemOp_setnc_Eb)
5422{
5423 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5424 IEMOP_HLP_MIN_386();
5425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5426
5427 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5428 * any way. AMD says it's "unused", whatever that means. We're
5429 * ignoring it for now. */
5430 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5431 {
5432 /* register target */
5433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5434 IEM_MC_BEGIN(0, 0);
5435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5436 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5437 } IEM_MC_ELSE() {
5438 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5439 } IEM_MC_ENDIF();
5440 IEM_MC_ADVANCE_RIP();
5441 IEM_MC_END();
5442 }
5443 else
5444 {
5445 /* memory target */
5446 IEM_MC_BEGIN(0, 1);
5447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5450 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5451 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5452 } IEM_MC_ELSE() {
5453 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5454 } IEM_MC_ENDIF();
5455 IEM_MC_ADVANCE_RIP();
5456 IEM_MC_END();
5457 }
5458 return VINF_SUCCESS;
5459}
5460
5461
5462/** Opcode 0x0f 0x94. */
5463FNIEMOP_DEF(iemOp_sete_Eb)
5464{
5465 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5466 IEMOP_HLP_MIN_386();
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468
5469 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5470 * any way. AMD says it's "unused", whatever that means. We're
5471 * ignoring it for now. */
5472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5473 {
5474 /* register target */
5475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5476 IEM_MC_BEGIN(0, 0);
5477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5478 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5479 } IEM_MC_ELSE() {
5480 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5481 } IEM_MC_ENDIF();
5482 IEM_MC_ADVANCE_RIP();
5483 IEM_MC_END();
5484 }
5485 else
5486 {
5487 /* memory target */
5488 IEM_MC_BEGIN(0, 1);
5489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5492 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5493 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5494 } IEM_MC_ELSE() {
5495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5496 } IEM_MC_ENDIF();
5497 IEM_MC_ADVANCE_RIP();
5498 IEM_MC_END();
5499 }
5500 return VINF_SUCCESS;
5501}
5502
5503
5504/** Opcode 0x0f 0x95. */
5505FNIEMOP_DEF(iemOp_setne_Eb)
5506{
5507 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5508 IEMOP_HLP_MIN_386();
5509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5510
5511 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5512 * any way. AMD says it's "unused", whatever that means. We're
5513 * ignoring it for now. */
5514 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5515 {
5516 /* register target */
5517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5518 IEM_MC_BEGIN(0, 0);
5519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5520 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5521 } IEM_MC_ELSE() {
5522 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5523 } IEM_MC_ENDIF();
5524 IEM_MC_ADVANCE_RIP();
5525 IEM_MC_END();
5526 }
5527 else
5528 {
5529 /* memory target */
5530 IEM_MC_BEGIN(0, 1);
5531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5534 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5535 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5536 } IEM_MC_ELSE() {
5537 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5538 } IEM_MC_ENDIF();
5539 IEM_MC_ADVANCE_RIP();
5540 IEM_MC_END();
5541 }
5542 return VINF_SUCCESS;
5543}
5544
5545
5546/** Opcode 0x0f 0x96. */
5547FNIEMOP_DEF(iemOp_setbe_Eb)
5548{
5549 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5550 IEMOP_HLP_MIN_386();
5551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5552
5553 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5554 * any way. AMD says it's "unused", whatever that means. We're
5555 * ignoring it for now. */
5556 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5557 {
5558 /* register target */
5559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5560 IEM_MC_BEGIN(0, 0);
5561 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5562 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP();
5567 IEM_MC_END();
5568 }
5569 else
5570 {
5571 /* memory target */
5572 IEM_MC_BEGIN(0, 1);
5573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5576 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5577 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5578 } IEM_MC_ELSE() {
5579 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5580 } IEM_MC_ENDIF();
5581 IEM_MC_ADVANCE_RIP();
5582 IEM_MC_END();
5583 }
5584 return VINF_SUCCESS;
5585}
5586
5587
5588/** Opcode 0x0f 0x97. */
5589FNIEMOP_DEF(iemOp_setnbe_Eb)
5590{
5591 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5592 IEMOP_HLP_MIN_386();
5593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5594
5595 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5596 * any way. AMD says it's "unused", whatever that means. We're
5597 * ignoring it for now. */
5598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5599 {
5600 /* register target */
5601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5602 IEM_MC_BEGIN(0, 0);
5603 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5604 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5605 } IEM_MC_ELSE() {
5606 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5607 } IEM_MC_ENDIF();
5608 IEM_MC_ADVANCE_RIP();
5609 IEM_MC_END();
5610 }
5611 else
5612 {
5613 /* memory target */
5614 IEM_MC_BEGIN(0, 1);
5615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5618 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5619 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5620 } IEM_MC_ELSE() {
5621 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5622 } IEM_MC_ENDIF();
5623 IEM_MC_ADVANCE_RIP();
5624 IEM_MC_END();
5625 }
5626 return VINF_SUCCESS;
5627}
5628
5629
5630/** Opcode 0x0f 0x98. */
5631FNIEMOP_DEF(iemOp_sets_Eb)
5632{
5633 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5634 IEMOP_HLP_MIN_386();
5635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5636
5637 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5638 * any way. AMD says it's "unused", whatever that means. We're
5639 * ignoring it for now. */
5640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5641 {
5642 /* register target */
5643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5644 IEM_MC_BEGIN(0, 0);
5645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5646 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5647 } IEM_MC_ELSE() {
5648 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5649 } IEM_MC_ENDIF();
5650 IEM_MC_ADVANCE_RIP();
5651 IEM_MC_END();
5652 }
5653 else
5654 {
5655 /* memory target */
5656 IEM_MC_BEGIN(0, 1);
5657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5661 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5662 } IEM_MC_ELSE() {
5663 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5664 } IEM_MC_ENDIF();
5665 IEM_MC_ADVANCE_RIP();
5666 IEM_MC_END();
5667 }
5668 return VINF_SUCCESS;
5669}
5670
5671
5672/** Opcode 0x0f 0x99. */
5673FNIEMOP_DEF(iemOp_setns_Eb)
5674{
5675 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5676 IEMOP_HLP_MIN_386();
5677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5678
5679 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5680 * any way. AMD says it's "unused", whatever that means. We're
5681 * ignoring it for now. */
5682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5683 {
5684 /* register target */
5685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5686 IEM_MC_BEGIN(0, 0);
5687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5688 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5689 } IEM_MC_ELSE() {
5690 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5691 } IEM_MC_ENDIF();
5692 IEM_MC_ADVANCE_RIP();
5693 IEM_MC_END();
5694 }
5695 else
5696 {
5697 /* memory target */
5698 IEM_MC_BEGIN(0, 1);
5699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5703 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5704 } IEM_MC_ELSE() {
5705 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5706 } IEM_MC_ENDIF();
5707 IEM_MC_ADVANCE_RIP();
5708 IEM_MC_END();
5709 }
5710 return VINF_SUCCESS;
5711}
5712
5713
5714/** Opcode 0x0f 0x9a. */
5715FNIEMOP_DEF(iemOp_setp_Eb)
5716{
5717 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5718 IEMOP_HLP_MIN_386();
5719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5720
5721 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5722 * any way. AMD says it's "unused", whatever that means. We're
5723 * ignoring it for now. */
5724 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5725 {
5726 /* register target */
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_BEGIN(0, 0);
5729 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5730 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5731 } IEM_MC_ELSE() {
5732 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5733 } IEM_MC_ENDIF();
5734 IEM_MC_ADVANCE_RIP();
5735 IEM_MC_END();
5736 }
5737 else
5738 {
5739 /* memory target */
5740 IEM_MC_BEGIN(0, 1);
5741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5744 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5745 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5746 } IEM_MC_ELSE() {
5747 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5748 } IEM_MC_ENDIF();
5749 IEM_MC_ADVANCE_RIP();
5750 IEM_MC_END();
5751 }
5752 return VINF_SUCCESS;
5753}
5754
5755
5756/** Opcode 0x0f 0x9b. */
5757FNIEMOP_DEF(iemOp_setnp_Eb)
5758{
5759 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5760 IEMOP_HLP_MIN_386();
5761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5762
5763 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5764 * any way. AMD says it's "unused", whatever that means. We're
5765 * ignoring it for now. */
5766 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5767 {
5768 /* register target */
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 IEM_MC_BEGIN(0, 0);
5771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5772 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5773 } IEM_MC_ELSE() {
5774 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5775 } IEM_MC_ENDIF();
5776 IEM_MC_ADVANCE_RIP();
5777 IEM_MC_END();
5778 }
5779 else
5780 {
5781 /* memory target */
5782 IEM_MC_BEGIN(0, 1);
5783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5786 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5787 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5788 } IEM_MC_ELSE() {
5789 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5790 } IEM_MC_ENDIF();
5791 IEM_MC_ADVANCE_RIP();
5792 IEM_MC_END();
5793 }
5794 return VINF_SUCCESS;
5795}
5796
5797
5798/** Opcode 0x0f 0x9c. */
5799FNIEMOP_DEF(iemOp_setl_Eb)
5800{
5801 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5802 IEMOP_HLP_MIN_386();
5803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5804
5805 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5806 * any way. AMD says it's "unused", whatever that means. We're
5807 * ignoring it for now. */
5808 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5809 {
5810 /* register target */
5811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5812 IEM_MC_BEGIN(0, 0);
5813 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5814 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5815 } IEM_MC_ELSE() {
5816 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5817 } IEM_MC_ENDIF();
5818 IEM_MC_ADVANCE_RIP();
5819 IEM_MC_END();
5820 }
5821 else
5822 {
5823 /* memory target */
5824 IEM_MC_BEGIN(0, 1);
5825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5828 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5829 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5830 } IEM_MC_ELSE() {
5831 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5832 } IEM_MC_ENDIF();
5833 IEM_MC_ADVANCE_RIP();
5834 IEM_MC_END();
5835 }
5836 return VINF_SUCCESS;
5837}
5838
5839
5840/** Opcode 0x0f 0x9d. */
5841FNIEMOP_DEF(iemOp_setnl_Eb)
5842{
5843 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5844 IEMOP_HLP_MIN_386();
5845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5846
5847 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5848 * any way. AMD says it's "unused", whatever that means. We're
5849 * ignoring it for now. */
5850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5851 {
5852 /* register target */
5853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5854 IEM_MC_BEGIN(0, 0);
5855 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5856 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5857 } IEM_MC_ELSE() {
5858 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5859 } IEM_MC_ENDIF();
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 }
5863 else
5864 {
5865 /* memory target */
5866 IEM_MC_BEGIN(0, 1);
5867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5870 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5871 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5872 } IEM_MC_ELSE() {
5873 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5874 } IEM_MC_ENDIF();
5875 IEM_MC_ADVANCE_RIP();
5876 IEM_MC_END();
5877 }
5878 return VINF_SUCCESS;
5879}
5880
5881
5882/** Opcode 0x0f 0x9e. */
5883FNIEMOP_DEF(iemOp_setle_Eb)
5884{
5885 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5886 IEMOP_HLP_MIN_386();
5887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5888
5889 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5890 * any way. AMD says it's "unused", whatever that means. We're
5891 * ignoring it for now. */
5892 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5893 {
5894 /* register target */
5895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5896 IEM_MC_BEGIN(0, 0);
5897 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5898 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5899 } IEM_MC_ELSE() {
5900 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5901 } IEM_MC_ENDIF();
5902 IEM_MC_ADVANCE_RIP();
5903 IEM_MC_END();
5904 }
5905 else
5906 {
5907 /* memory target */
5908 IEM_MC_BEGIN(0, 1);
5909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5914 } IEM_MC_ELSE() {
5915 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5916 } IEM_MC_ENDIF();
5917 IEM_MC_ADVANCE_RIP();
5918 IEM_MC_END();
5919 }
5920 return VINF_SUCCESS;
5921}
5922
5923
5924/** Opcode 0x0f 0x9f. */
5925FNIEMOP_DEF(iemOp_setnle_Eb)
5926{
5927 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5928 IEMOP_HLP_MIN_386();
5929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5930
5931 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5932 * any way. AMD says it's "unused", whatever that means. We're
5933 * ignoring it for now. */
5934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5935 {
5936 /* register target */
5937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5938 IEM_MC_BEGIN(0, 0);
5939 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5940 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5941 } IEM_MC_ELSE() {
5942 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5943 } IEM_MC_ENDIF();
5944 IEM_MC_ADVANCE_RIP();
5945 IEM_MC_END();
5946 }
5947 else
5948 {
5949 /* memory target */
5950 IEM_MC_BEGIN(0, 1);
5951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5954 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5955 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5956 } IEM_MC_ELSE() {
5957 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5958 } IEM_MC_ENDIF();
5959 IEM_MC_ADVANCE_RIP();
5960 IEM_MC_END();
5961 }
5962 return VINF_SUCCESS;
5963}
5964
5965
5966/**
5967 * Common 'push segment-register' helper.
5968 */
5969FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5970{
5971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5972 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5973 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5974
5975 switch (pVCpu->iem.s.enmEffOpSize)
5976 {
5977 case IEMMODE_16BIT:
5978 IEM_MC_BEGIN(0, 1);
5979 IEM_MC_LOCAL(uint16_t, u16Value);
5980 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5981 IEM_MC_PUSH_U16(u16Value);
5982 IEM_MC_ADVANCE_RIP();
5983 IEM_MC_END();
5984 break;
5985
5986 case IEMMODE_32BIT:
5987 IEM_MC_BEGIN(0, 1);
5988 IEM_MC_LOCAL(uint32_t, u32Value);
5989 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5990 IEM_MC_PUSH_U32_SREG(u32Value);
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 break;
5994
5995 case IEMMODE_64BIT:
5996 IEM_MC_BEGIN(0, 1);
5997 IEM_MC_LOCAL(uint64_t, u64Value);
5998 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5999 IEM_MC_PUSH_U64(u64Value);
6000 IEM_MC_ADVANCE_RIP();
6001 IEM_MC_END();
6002 break;
6003 }
6004
6005 return VINF_SUCCESS;
6006}
6007
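/*
 * Side note on the 32-bit case above: it uses IEM_MC_PUSH_U32_SREG rather
 * than a plain IEM_MC_PUSH_U32 as in the 64-bit path.  Real CPUs are
 * documented to potentially write only the low 16 bits of the 32-bit stack
 * slot when pushing a segment register, which is presumably what the
 * dedicated micro-op models; treat that rationale as an inference from the
 * macro name rather than something stated in this file.
 */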
6008
6009/** Opcode 0x0f 0xa0. */
6010FNIEMOP_DEF(iemOp_push_fs)
6011{
6012 IEMOP_MNEMONIC(push_fs, "push fs");
6013 IEMOP_HLP_MIN_386();
6014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6015 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6016}
6017
6018
6019/** Opcode 0x0f 0xa1. */
6020FNIEMOP_DEF(iemOp_pop_fs)
6021{
6022 IEMOP_MNEMONIC(pop_fs, "pop fs");
6023 IEMOP_HLP_MIN_386();
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6026}
6027
6028
6029/** Opcode 0x0f 0xa2. */
6030FNIEMOP_DEF(iemOp_cpuid)
6031{
6032 IEMOP_MNEMONIC(cpuid, "cpuid");
6033 IEMOP_HLP_MIN_486(); /* not all 486s implement CPUID. */
6034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6035 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6036}
6037
6038
6039/**
6040 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6041 * iemOp_bts_Ev_Gv.
6042 */
6043FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6044{
6045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6046 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6047
6048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6049 {
6050 /* register destination. */
6051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6052 switch (pVCpu->iem.s.enmEffOpSize)
6053 {
6054 case IEMMODE_16BIT:
6055 IEM_MC_BEGIN(3, 0);
6056 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6057 IEM_MC_ARG(uint16_t, u16Src, 1);
6058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6059
6060 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6061 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6062 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6063 IEM_MC_REF_EFLAGS(pEFlags);
6064 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6065
6066 IEM_MC_ADVANCE_RIP();
6067 IEM_MC_END();
6068 return VINF_SUCCESS;
6069
6070 case IEMMODE_32BIT:
6071 IEM_MC_BEGIN(3, 0);
6072 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6073 IEM_MC_ARG(uint32_t, u32Src, 1);
6074 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6075
6076 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6077 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6078 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6079 IEM_MC_REF_EFLAGS(pEFlags);
6080 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6081
6082 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6083 IEM_MC_ADVANCE_RIP();
6084 IEM_MC_END();
6085 return VINF_SUCCESS;
6086
6087 case IEMMODE_64BIT:
6088 IEM_MC_BEGIN(3, 0);
6089 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6090 IEM_MC_ARG(uint64_t, u64Src, 1);
6091 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6092
6093 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6094 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6095 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6096 IEM_MC_REF_EFLAGS(pEFlags);
6097 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6098
6099 IEM_MC_ADVANCE_RIP();
6100 IEM_MC_END();
6101 return VINF_SUCCESS;
6102
6103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6104 }
6105 }
6106 else
6107 {
6108 /* memory destination. */
6109
6110 uint32_t fAccess;
6111 if (pImpl->pfnLockedU16)
6112 fAccess = IEM_ACCESS_DATA_RW;
6113 else /* BT */
6114 fAccess = IEM_ACCESS_DATA_R;
6115
6116 /** @todo test negative bit offsets! */
6117 switch (pVCpu->iem.s.enmEffOpSize)
6118 {
6119 case IEMMODE_16BIT:
6120 IEM_MC_BEGIN(3, 2);
6121 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6122 IEM_MC_ARG(uint16_t, u16Src, 1);
6123 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6125 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6126
6127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6128 if (pImpl->pfnLockedU16)
6129 IEMOP_HLP_DONE_DECODING();
6130 else
6131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6132 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6133 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6134 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6135 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6136 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6137 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6138 IEM_MC_FETCH_EFLAGS(EFlags);
6139
6140 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6141 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6142 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6143 else
6144 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6146
6147 IEM_MC_COMMIT_EFLAGS(EFlags);
6148 IEM_MC_ADVANCE_RIP();
6149 IEM_MC_END();
6150 return VINF_SUCCESS;
6151
6152 case IEMMODE_32BIT:
6153 IEM_MC_BEGIN(3, 2);
6154 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6155 IEM_MC_ARG(uint32_t, u32Src, 1);
6156 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6158 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6159
6160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6161 if (pImpl->pfnLockedU16)
6162 IEMOP_HLP_DONE_DECODING();
6163 else
6164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6165 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6166 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6167 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6168 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6169 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6170 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6171 IEM_MC_FETCH_EFLAGS(EFlags);
6172
6173 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6174 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6176 else
6177 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6179
6180 IEM_MC_COMMIT_EFLAGS(EFlags);
6181 IEM_MC_ADVANCE_RIP();
6182 IEM_MC_END();
6183 return VINF_SUCCESS;
6184
6185 case IEMMODE_64BIT:
6186 IEM_MC_BEGIN(3, 2);
6187 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6188 IEM_MC_ARG(uint64_t, u64Src, 1);
6189 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6191 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6192
6193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6194 if (pImpl->pfnLockedU16)
6195 IEMOP_HLP_DONE_DECODING();
6196 else
6197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6198 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6199 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6200 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6201 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6202 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6203 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6204 IEM_MC_FETCH_EFLAGS(EFlags);
6205
6206 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6208 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6209 else
6210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6211 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6212
6213 IEM_MC_COMMIT_EFLAGS(EFlags);
6214 IEM_MC_ADVANCE_RIP();
6215 IEM_MC_END();
6216 return VINF_SUCCESS;
6217
6218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6219 }
6220 }
6221}
6222
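/*
 * Worked illustration of the memory-form address adjustment above (plain-C
 * sketch instead of the IEM_MC_* micro-ops): the bit offset in the source
 * register is treated as a *signed* index relative to the effective
 * address.  For the 16-bit operand size:
 *
 *     int16_t iBitIdx = (int16_t)u16Src;
 *     GCPtrEffDst    += (int16_t)(iBitIdx >> 4) * 2;   // the SAR-by-4 / SHL-by-1 pair
 *     u16Src         &= 0x0f;                          // bit within that word
 *
 * So a bit index of -1 selects bit 15 of the word immediately below the
 * effective address.  The 32-bit (>>5, <<2) and 64-bit (>>6, <<3) cases
 * scale the same way.
 */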
6223
6224/** Opcode 0x0f 0xa3. */
6225FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6226{
6227 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6228 IEMOP_HLP_MIN_386();
6229 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6230}
6231
6232
6233/**
6234 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6235 */
6236FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6237{
6238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6239 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6240
6241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6242 {
6243 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6245
6246 switch (pVCpu->iem.s.enmEffOpSize)
6247 {
6248 case IEMMODE_16BIT:
6249 IEM_MC_BEGIN(4, 0);
6250 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6251 IEM_MC_ARG(uint16_t, u16Src, 1);
6252 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6253 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6254
6255 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6256 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6257 IEM_MC_REF_EFLAGS(pEFlags);
6258 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6259
6260 IEM_MC_ADVANCE_RIP();
6261 IEM_MC_END();
6262 return VINF_SUCCESS;
6263
6264 case IEMMODE_32BIT:
6265 IEM_MC_BEGIN(4, 0);
6266 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6267 IEM_MC_ARG(uint32_t, u32Src, 1);
6268 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6269 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6270
6271 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6272 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6273 IEM_MC_REF_EFLAGS(pEFlags);
6274 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6275
6276 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6277 IEM_MC_ADVANCE_RIP();
6278 IEM_MC_END();
6279 return VINF_SUCCESS;
6280
6281 case IEMMODE_64BIT:
6282 IEM_MC_BEGIN(4, 0);
6283 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6284 IEM_MC_ARG(uint64_t, u64Src, 1);
6285 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6286 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6287
6288 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6289 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6290 IEM_MC_REF_EFLAGS(pEFlags);
6291 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6292
6293 IEM_MC_ADVANCE_RIP();
6294 IEM_MC_END();
6295 return VINF_SUCCESS;
6296
6297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6298 }
6299 }
6300 else
6301 {
6302 switch (pVCpu->iem.s.enmEffOpSize)
6303 {
6304 case IEMMODE_16BIT:
6305 IEM_MC_BEGIN(4, 2);
6306 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6307 IEM_MC_ARG(uint16_t, u16Src, 1);
6308 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6309 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6311
6312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6313 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6314 IEM_MC_ASSIGN(cShiftArg, cShift);
6315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6316 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6317 IEM_MC_FETCH_EFLAGS(EFlags);
6318 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6319 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6320
6321 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6322 IEM_MC_COMMIT_EFLAGS(EFlags);
6323 IEM_MC_ADVANCE_RIP();
6324 IEM_MC_END();
6325 return VINF_SUCCESS;
6326
6327 case IEMMODE_32BIT:
6328 IEM_MC_BEGIN(4, 2);
6329 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6330 IEM_MC_ARG(uint32_t, u32Src, 1);
6331 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6334
6335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6336 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6337 IEM_MC_ASSIGN(cShiftArg, cShift);
6338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6339 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6340 IEM_MC_FETCH_EFLAGS(EFlags);
6341 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6342 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6343
6344 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6345 IEM_MC_COMMIT_EFLAGS(EFlags);
6346 IEM_MC_ADVANCE_RIP();
6347 IEM_MC_END();
6348 return VINF_SUCCESS;
6349
6350 case IEMMODE_64BIT:
6351 IEM_MC_BEGIN(4, 2);
6352 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6353 IEM_MC_ARG(uint64_t, u64Src, 1);
6354 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6355 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6357
6358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6359 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6360 IEM_MC_ASSIGN(cShiftArg, cShift);
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6363 IEM_MC_FETCH_EFLAGS(EFlags);
6364 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6365 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6366
6367 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6368 IEM_MC_COMMIT_EFLAGS(EFlags);
6369 IEM_MC_ADVANCE_RIP();
6370 IEM_MC_END();
6371 return VINF_SUCCESS;
6372
6373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6374 }
6375 }
6376}
6377
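/*
 * For reference, the double-precision shift implemented by the
 * pfnNormalUxx workers can be sketched in plain C as follows (SHLD, 32-bit
 * case; EFLAGS updates and the undefined count-greater-than-width cases
 * are left out - a sketch of the semantics, not the actual assembly
 * workers):
 *
 *     cShift &= 31;                                   // 63 for the 64-bit form
 *     if (cShift)
 *         *pu32Dst = (*pu32Dst << cShift) | (u32Src >> (32 - cShift));
 *
 * The CL variant below differs from this Ib form only in fetching the
 * shift count from CL at execution time instead of from an immediate byte.
 */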
6378
6379/**
6380 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6381 */
6382FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6383{
6384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6385 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6386
6387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6388 {
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390
6391 switch (pVCpu->iem.s.enmEffOpSize)
6392 {
6393 case IEMMODE_16BIT:
6394 IEM_MC_BEGIN(4, 0);
6395 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6396 IEM_MC_ARG(uint16_t, u16Src, 1);
6397 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6398 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6399
6400 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6401 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6402 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6403 IEM_MC_REF_EFLAGS(pEFlags);
6404 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6405
6406 IEM_MC_ADVANCE_RIP();
6407 IEM_MC_END();
6408 return VINF_SUCCESS;
6409
6410 case IEMMODE_32BIT:
6411 IEM_MC_BEGIN(4, 0);
6412 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6413 IEM_MC_ARG(uint32_t, u32Src, 1);
6414 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6415 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6416
6417 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6418 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6419 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6420 IEM_MC_REF_EFLAGS(pEFlags);
6421 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6422
6423 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6424 IEM_MC_ADVANCE_RIP();
6425 IEM_MC_END();
6426 return VINF_SUCCESS;
6427
6428 case IEMMODE_64BIT:
6429 IEM_MC_BEGIN(4, 0);
6430 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6431 IEM_MC_ARG(uint64_t, u64Src, 1);
6432 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6433 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6434
6435 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6436 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6437 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6438 IEM_MC_REF_EFLAGS(pEFlags);
6439 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6440
6441 IEM_MC_ADVANCE_RIP();
6442 IEM_MC_END();
6443 return VINF_SUCCESS;
6444
6445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6446 }
6447 }
6448 else
6449 {
6450 switch (pVCpu->iem.s.enmEffOpSize)
6451 {
6452 case IEMMODE_16BIT:
6453 IEM_MC_BEGIN(4, 2);
6454 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6455 IEM_MC_ARG(uint16_t, u16Src, 1);
6456 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6457 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6459
6460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6463 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6464 IEM_MC_FETCH_EFLAGS(EFlags);
6465 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6466 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6467
6468 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6469 IEM_MC_COMMIT_EFLAGS(EFlags);
6470 IEM_MC_ADVANCE_RIP();
6471 IEM_MC_END();
6472 return VINF_SUCCESS;
6473
6474 case IEMMODE_32BIT:
6475 IEM_MC_BEGIN(4, 2);
6476 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6477 IEM_MC_ARG(uint32_t, u32Src, 1);
6478 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6479 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6481
6482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6485 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6486 IEM_MC_FETCH_EFLAGS(EFlags);
6487 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6488 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6489
6490 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6491 IEM_MC_COMMIT_EFLAGS(EFlags);
6492 IEM_MC_ADVANCE_RIP();
6493 IEM_MC_END();
6494 return VINF_SUCCESS;
6495
6496 case IEMMODE_64BIT:
6497 IEM_MC_BEGIN(4, 2);
6498 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6499 IEM_MC_ARG(uint64_t, u64Src, 1);
6500 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6501 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6503
6504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6507 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6508 IEM_MC_FETCH_EFLAGS(EFlags);
6509 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6510 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6511
6512 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6513 IEM_MC_COMMIT_EFLAGS(EFlags);
6514 IEM_MC_ADVANCE_RIP();
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517
6518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6519 }
6520 }
6521}
6522
6523
6524
6525/** Opcode 0x0f 0xa4. */
6526FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6527{
6528 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6529 IEMOP_HLP_MIN_386();
6530 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6531}
6532
6533
6534/** Opcode 0x0f 0xa5. */
6535FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6536{
6537 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6538 IEMOP_HLP_MIN_386();
6539 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6540}
6541
6542
6543/** Opcode 0x0f 0xa8. */
6544FNIEMOP_DEF(iemOp_push_gs)
6545{
6546 IEMOP_MNEMONIC(push_gs, "push gs");
6547 IEMOP_HLP_MIN_386();
6548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6549 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6550}
6551
6552
6553/** Opcode 0x0f 0xa9. */
6554FNIEMOP_DEF(iemOp_pop_gs)
6555{
6556 IEMOP_MNEMONIC(pop_gs, "pop gs");
6557 IEMOP_HLP_MIN_386();
6558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6559 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6560}
6561
6562
6563/** Opcode 0x0f 0xaa. */
6564FNIEMOP_DEF(iemOp_rsm)
6565{
6566 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6567 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6569 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6570}
6571
6572
6573
6574/** Opcode 0x0f 0xab. */
6575FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6576{
6577 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6578 IEMOP_HLP_MIN_386();
6579 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6580}
6581
6582
6583/** Opcode 0x0f 0xac. */
6584FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6585{
6586 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6587 IEMOP_HLP_MIN_386();
6588 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6589}
6590
6591
6592/** Opcode 0x0f 0xad. */
6593FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6594{
6595 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6596 IEMOP_HLP_MIN_386();
6597 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6598}
6599
6600
6601/** Opcode 0x0f 0xae mem/0. */
6602FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6603{
6604 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6605 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6606 return IEMOP_RAISE_INVALID_OPCODE();
6607
6608 IEM_MC_BEGIN(3, 1);
6609 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6610 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6611 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6615 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6616 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6617 IEM_MC_END();
6618 return VINF_SUCCESS;
6619}
6620
6621
6622/** Opcode 0x0f 0xae mem/1. */
6623FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6624{
6625 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6626 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6627 return IEMOP_RAISE_INVALID_OPCODE();
6628
6629 IEM_MC_BEGIN(3, 1);
6630 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6631 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6632 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6635 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6636 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6637 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6638 IEM_MC_END();
6639 return VINF_SUCCESS;
6640}
6641
6642
6643/**
6644 * @opmaps grp15
6645 * @opcode !11/2
6646 * @oppfx none
6647 * @opcpuid sse
6648 * @opgroup og_sse_mxcsrsm
6649 * @opxcpttype 5
6650 * @optest op1=0 -> mxcsr=0
6651 * @optest op1=0x2083 -> mxcsr=0x2083
6652 * @optest op1=0xfffffffe -> value.xcpt=0xd
6653 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6654 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6655 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6656 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6657 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6658 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6659 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6660 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6661 */
6662FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6663{
6664 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6665 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6666 return IEMOP_RAISE_INVALID_OPCODE();
6667
6668 IEM_MC_BEGIN(2, 0);
6669 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6670 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6673 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6674 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6675 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6676 IEM_MC_END();
6677 return VINF_SUCCESS;
6678}
6679
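/*
 * Note on the @optest rows above: op1=0xfffffffe expects #GP (xcpt=0xd)
 * because LDMXCSR raises #GP(0) when any reserved MXCSR bit is set, while
 * the CR0.TS/EM and CR4.OSFXSR rows document the usual #NM/#UD gating
 * shared with the other SSE state instructions.
 */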
6680
6681/**
6682 * @opmaps grp15
6683 * @opcode !11/3
6684 * @oppfx none
6685 * @opcpuid sse
6686 * @opgroup og_sse_mxcsrsm
6687 * @opxcpttype 5
6688 * @optest mxcsr=0 -> op1=0
6689 * @optest mxcsr=0x2083 -> op1=0x2083
6690 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6691 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6692 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6693 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6694 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6695 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6696 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6697 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6698 */
6699FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6700{
6701 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6702 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6703 return IEMOP_RAISE_INVALID_OPCODE();
6704
6705 IEM_MC_BEGIN(2, 0);
6706 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6707 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6710 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6711 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6712 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6713 IEM_MC_END();
6714 return VINF_SUCCESS;
6715}
6716
6717
6718/**
6719 * @opmaps grp15
6720 * @opcode !11/4
6721 * @oppfx none
6722 * @opcpuid xsave
6723 * @opgroup og_system
6724 * @opxcpttype none
6725 */
6726FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6727{
6728 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6729 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6730 return IEMOP_RAISE_INVALID_OPCODE();
6731
6732 IEM_MC_BEGIN(3, 0);
6733 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6734 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6735 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6739 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6740 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6741 IEM_MC_END();
6742 return VINF_SUCCESS;
6743}
6744
6745
6746/**
6747 * @opmaps grp15
6748 * @opcode !11/5
6749 * @oppfx none
6750 * @opcpuid xsave
6751 * @opgroup og_system
6752 * @opxcpttype none
6753 */
6754FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6755{
6756 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6757 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6758 return IEMOP_RAISE_INVALID_OPCODE();
6759
6760 IEM_MC_BEGIN(3, 0);
6761 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6762 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6763 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6767 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6768 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6769 IEM_MC_END();
6770 return VINF_SUCCESS;
6771}
6772
6773/** Opcode 0x0f 0xae mem/6. */
6774FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6775
6776/**
6777 * @opmaps grp15
6778 * @opcode !11/7
6779 * @oppfx none
6780 * @opcpuid clfsh
6781 * @opgroup og_cachectl
6782 * @optest op1=1 ->
6783 */
6784FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6785{
6786 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6787 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6788 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6789
6790 IEM_MC_BEGIN(2, 0);
6791 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6792 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6795 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6796 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6797 IEM_MC_END();
6798 return VINF_SUCCESS;
6799}
6800
6801/**
6802 * @opmaps grp15
6803 * @opcode !11/7
6804 * @oppfx 0x66
6805 * @opcpuid clflushopt
6806 * @opgroup og_cachectl
6807 * @optest op1=1 ->
6808 */
6809FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6810{
6811 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6812 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6813 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6814
6815 IEM_MC_BEGIN(2, 0);
6816 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6817 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6820 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6821 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6822 IEM_MC_END();
6823 return VINF_SUCCESS;
6824}
6825
6826
6827/** Opcode 0x0f 0xae 11b/5. */
6828FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6829{
6830 RT_NOREF_PV(bRm);
6831 IEMOP_MNEMONIC(lfence, "lfence");
6832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6833 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6834 return IEMOP_RAISE_INVALID_OPCODE();
6835
6836 IEM_MC_BEGIN(0, 0);
6837 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6838 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6839 else
6840 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6841 IEM_MC_ADVANCE_RIP();
6842 IEM_MC_END();
6843 return VINF_SUCCESS;
6844}
6845
6846
6847/** Opcode 0x0f 0xae 11b/6. */
6848FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6849{
6850 RT_NOREF_PV(bRm);
6851 IEMOP_MNEMONIC(mfence, "mfence");
6852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6853 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6854 return IEMOP_RAISE_INVALID_OPCODE();
6855
6856 IEM_MC_BEGIN(0, 0);
6857 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6858 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6859 else
6860 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6861 IEM_MC_ADVANCE_RIP();
6862 IEM_MC_END();
6863 return VINF_SUCCESS;
6864}
6865
6866
6867/** Opcode 0x0f 0xae 11b/7. */
6868FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6869{
6870 RT_NOREF_PV(bRm);
6871 IEMOP_MNEMONIC(sfence, "sfence");
6872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6873 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6874 return IEMOP_RAISE_INVALID_OPCODE();
6875
6876 IEM_MC_BEGIN(0, 0);
6877 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6878 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6879 else
6880 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6881 IEM_MC_ADVANCE_RIP();
6882 IEM_MC_END();
6883 return VINF_SUCCESS;
6884}
6885
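/*
 * Note on the three fence handlers above: when the *host* lacks SSE2 the
 * code falls back to iemAImpl_alt_mem_fence instead of executing the real
 * lfence/mfence/sfence instruction.  A locked read-modify-write on a dummy
 * location is the classic substitute serializing operation on such CPUs;
 * whether that is exactly what the alternative worker does is an
 * assumption here - the definition lives in the assembly implementation
 * files.
 */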
6886
6887/** Opcode 0xf3 0x0f 0xae 11b/0. */
6888FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6889{
6890 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6892 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6893 {
6894 IEM_MC_BEGIN(1, 0);
6895 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6896 IEM_MC_ARG(uint64_t, u64Dst, 0);
6897 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6898 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6899 IEM_MC_ADVANCE_RIP();
6900 IEM_MC_END();
6901 }
6902 else
6903 {
6904 IEM_MC_BEGIN(1, 0);
6905 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6906 IEM_MC_ARG(uint32_t, u32Dst, 0);
6907 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6908 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6909 IEM_MC_ADVANCE_RIP();
6910 IEM_MC_END();
6911 }
6912 return VINF_SUCCESS;
6913}
6914
6915
6916/** Opcode 0xf3 0x0f 0xae 11b/1. */
6917FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6918{
6919 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6921 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6922 {
6923 IEM_MC_BEGIN(1, 0);
6924 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6925 IEM_MC_ARG(uint64_t, u64Dst, 0);
6926 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
6927 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6928 IEM_MC_ADVANCE_RIP();
6929 IEM_MC_END();
6930 }
6931 else
6932 {
6933 IEM_MC_BEGIN(1, 0);
6934 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6935 IEM_MC_ARG(uint32_t, u32Dst, 0);
6936 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
6937 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6938 IEM_MC_ADVANCE_RIP();
6939 IEM_MC_END();
6940 }
6941 return VINF_SUCCESS;
6942}
6943
6944
6945/** Opcode 0xf3 0x0f 0xae 11b/2. */
6946FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
6947{
6948 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
6949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6950 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6951 {
6952 IEM_MC_BEGIN(1, 0);
6953 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6954 IEM_MC_ARG(uint64_t, u64Dst, 0);
6955 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6956 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6957 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 }
6961 else
6962 {
6963 IEM_MC_BEGIN(1, 0);
6964 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6965 IEM_MC_ARG(uint32_t, u32Dst, 0);
6966 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6967 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 }
6971 return VINF_SUCCESS;
6972}
6973
6974
6975/** Opcode 0xf3 0x0f 0xae 11b/3. */
6976FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
6977{
6978 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
6979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6980 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6981 {
6982 IEM_MC_BEGIN(1, 0);
6983 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6984 IEM_MC_ARG(uint64_t, u64Dst, 0);
6985 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6986 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6987 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
6988 IEM_MC_ADVANCE_RIP();
6989 IEM_MC_END();
6990 }
6991 else
6992 {
6993 IEM_MC_BEGIN(1, 0);
6994 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6995 IEM_MC_ARG(uint32_t, u32Dst, 0);
6996 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6997 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
6998 IEM_MC_ADVANCE_RIP();
6999 IEM_MC_END();
7000 }
7001 return VINF_SUCCESS;
7002}
7003
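/*
 * Note on the WRFSBASE/WRGSBASE pair above: only the 64-bit operand-size
 * path needs IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0, since the 32-bit
 * path zero-extends the value via IEM_MC_STORE_SREG_BASE_U64 and a
 * zero-extended 32-bit address is always canonical.
 */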
7004
7005/**
7006 * Group 15 jump table for register variant.
7007 */
7008IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7009{ /* pfx: none, 066h, 0f3h, 0f2h */
7010 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7011 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7012 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7013 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7014 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7015 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7016 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7017 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7018};
7019AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7020
7021
7022/**
7023 * Group 15 jump table for memory variant.
7024 */
7025IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7026{ /* pfx: none, 066h, 0f3h, 0f2h */
7027 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7028 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7029 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7030 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7031 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7032 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7033 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7034 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7035};
7036AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7037
7038
7039/** Opcode 0x0f 0xae. */
7040FNIEMOP_DEF(iemOp_Grp15)
7041{
7042 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
7043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7045 /* register, register */
7046 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7047 + pVCpu->iem.s.idxPrefix], bRm);
7048 /* memory, register */
7049 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7050 + pVCpu->iem.s.idxPrefix], bRm);
7051}
7052
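/*
 * Dispatch illustration for the tables above (plain-C sketch of the index
 * arithmetic, not new decoder logic): each /r value owns four consecutive
 * slots, one per mandatory-prefix column, so
 *
 *     unsigned idx = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
 *                  + pVCpu->iem.s.idxPrefix;   // none=0, 066h=1, 0f3h=2, 0f2h=3
 *
 * which is exactly the expression passed to FNIEMOP_CALL_1 above; the only
 * remaining choice, made on the mod field, is the register vs. memory table.
 */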
7053
7054/** Opcode 0x0f 0xaf. */
7055FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7056{
7057 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7058 IEMOP_HLP_MIN_386();
7059 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7060 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7061}
7062
7063
7064/** Opcode 0x0f 0xb0. */
7065FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7066{
7067 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7068 IEMOP_HLP_MIN_486();
7069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7070
7071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7072 {
7073 IEMOP_HLP_DONE_DECODING();
7074 IEM_MC_BEGIN(4, 0);
7075 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7076 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7077 IEM_MC_ARG(uint8_t, u8Src, 2);
7078 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7079
7080 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7081 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7082 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7083 IEM_MC_REF_EFLAGS(pEFlags);
7084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7085 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7086 else
7087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7088
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 }
7092 else
7093 {
7094 IEM_MC_BEGIN(4, 3);
7095 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7096 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7097 IEM_MC_ARG(uint8_t, u8Src, 2);
7098 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7100 IEM_MC_LOCAL(uint8_t, u8Al);
7101
7102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7103 IEMOP_HLP_DONE_DECODING();
7104 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7105 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7106 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7107 IEM_MC_FETCH_EFLAGS(EFlags);
7108 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7109 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7110 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7111 else
7112 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7113
7114 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7115 IEM_MC_COMMIT_EFLAGS(EFlags);
7116 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7117 IEM_MC_ADVANCE_RIP();
7118 IEM_MC_END();
7119 }
7120 return VINF_SUCCESS;
7121}
7122
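/*
 * Plain-C sketch of the cmpxchg worker semantics used above (EFLAGS
 * details and the locked variants omitted; an illustration, not the real
 * iemAImpl_cmpxchg_u8):
 *
 *     if (*pu8Dst == *pu8Al) { *pu8Dst = u8Src;   fZF = 1; }
 *     else                   { *pu8Al  = *pu8Dst; fZF = 0; }
 *
 * Note that the memory path stores the AL snapshot back to xAX
 * unconditionally after the call - architecturally AL is only updated on a
 * mismatch, but writing the unmodified snapshot back on a match is
 * equivalent.
 */
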
7123/** Opcode 0x0f 0xb1. */
7124FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7125{
7126 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7127 IEMOP_HLP_MIN_486();
7128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7129
7130 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7131 {
7132 IEMOP_HLP_DONE_DECODING();
7133 switch (pVCpu->iem.s.enmEffOpSize)
7134 {
7135 case IEMMODE_16BIT:
7136 IEM_MC_BEGIN(4, 0);
7137 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7138 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7139 IEM_MC_ARG(uint16_t, u16Src, 2);
7140 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7141
7142 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7143 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7144 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7145 IEM_MC_REF_EFLAGS(pEFlags);
7146 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7147 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7148 else
7149 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7150
7151 IEM_MC_ADVANCE_RIP();
7152 IEM_MC_END();
7153 return VINF_SUCCESS;
7154
7155 case IEMMODE_32BIT:
7156 IEM_MC_BEGIN(4, 0);
7157 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7158 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7159 IEM_MC_ARG(uint32_t, u32Src, 2);
7160 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7161
7162 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7163 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7164 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7165 IEM_MC_REF_EFLAGS(pEFlags);
7166 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7167 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7168 else
7169 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7170
7171 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7172 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7173 IEM_MC_ADVANCE_RIP();
7174 IEM_MC_END();
7175 return VINF_SUCCESS;
7176
7177 case IEMMODE_64BIT:
7178 IEM_MC_BEGIN(4, 0);
7179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7180 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7181#ifdef RT_ARCH_X86
7182 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7183#else
7184 IEM_MC_ARG(uint64_t, u64Src, 2);
7185#endif
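                /* On 32-bit hosts the 64-bit source operand is passed by
                   reference above, presumably to suit the assembly worker's
                   calling convention. */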
7186 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7187
7188 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7189 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7190 IEM_MC_REF_EFLAGS(pEFlags);
7191#ifdef RT_ARCH_X86
7192 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7193 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7194 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7195 else
7196 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7197#else
7198 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7199 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7200 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7201 else
7202 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7203#endif
7204
7205 IEM_MC_ADVANCE_RIP();
7206 IEM_MC_END();
7207 return VINF_SUCCESS;
7208
7209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7210 }
7211 }
7212 else
7213 {
7214 switch (pVCpu->iem.s.enmEffOpSize)
7215 {
7216 case IEMMODE_16BIT:
7217 IEM_MC_BEGIN(4, 3);
7218 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7219 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7220 IEM_MC_ARG(uint16_t, u16Src, 2);
7221 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7223 IEM_MC_LOCAL(uint16_t, u16Ax);
7224
7225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7226 IEMOP_HLP_DONE_DECODING();
7227 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7228 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7229 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7230 IEM_MC_FETCH_EFLAGS(EFlags);
7231 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7233 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7234 else
7235 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7236
7237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7238 IEM_MC_COMMIT_EFLAGS(EFlags);
7239 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7240 IEM_MC_ADVANCE_RIP();
7241 IEM_MC_END();
7242 return VINF_SUCCESS;
7243
7244 case IEMMODE_32BIT:
7245 IEM_MC_BEGIN(4, 3);
7246 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7247 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7248 IEM_MC_ARG(uint32_t, u32Src, 2);
7249 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7251 IEM_MC_LOCAL(uint32_t, u32Eax);
7252
7253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7254 IEMOP_HLP_DONE_DECODING();
7255 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7256 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7257 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7258 IEM_MC_FETCH_EFLAGS(EFlags);
7259 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7262 else
7263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7264
7265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7266 IEM_MC_COMMIT_EFLAGS(EFlags);
7267 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7268 IEM_MC_ADVANCE_RIP();
7269 IEM_MC_END();
7270 return VINF_SUCCESS;
7271
7272 case IEMMODE_64BIT:
7273 IEM_MC_BEGIN(4, 3);
7274 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7275 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7276#ifdef RT_ARCH_X86
7277 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7278#else
7279 IEM_MC_ARG(uint64_t, u64Src, 2);
7280#endif
7281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7283 IEM_MC_LOCAL(uint64_t, u64Rax);
7284
7285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7286 IEMOP_HLP_DONE_DECODING();
7287 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7288 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7289 IEM_MC_FETCH_EFLAGS(EFlags);
7290 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7291#ifdef RT_ARCH_X86
7292 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7294 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7295 else
7296 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7297#else
7298 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7299 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7300 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7301 else
7302 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7303#endif
7304
7305 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7306 IEM_MC_COMMIT_EFLAGS(EFlags);
7307 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7308 IEM_MC_ADVANCE_RIP();
7309 IEM_MC_END();
7310 return VINF_SUCCESS;
7311
7312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7313 }
7314 }
7315}
7316
7317
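/**
 * Common worker for LSS, LFS and LGS: loads a far pointer from memory (an
 * offset followed by a 16-bit selector) into the given general purpose
 * register and segment register.
 */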
7318FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7319{
7320 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7321 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7322
7323 switch (pVCpu->iem.s.enmEffOpSize)
7324 {
7325 case IEMMODE_16BIT:
7326 IEM_MC_BEGIN(5, 1);
7327 IEM_MC_ARG(uint16_t, uSel, 0);
7328 IEM_MC_ARG(uint16_t, offSeg, 1);
7329 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7330 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7331 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7332 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7335 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7336 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7337 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7338 IEM_MC_END();
7339 return VINF_SUCCESS;
7340
7341 case IEMMODE_32BIT:
7342 IEM_MC_BEGIN(5, 1);
7343 IEM_MC_ARG(uint16_t, uSel, 0);
7344 IEM_MC_ARG(uint32_t, offSeg, 1);
7345 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7346 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7347 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7348 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7351 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7352 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7353 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7354 IEM_MC_END();
7355 return VINF_SUCCESS;
7356
7357 case IEMMODE_64BIT:
7358 IEM_MC_BEGIN(5, 1);
7359 IEM_MC_ARG(uint16_t, uSel, 0);
7360 IEM_MC_ARG(uint64_t, offSeg, 1);
7361 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7362 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7364 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7367 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7368 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7369 else
7370 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7371 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7372 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7373 IEM_MC_END();
7374 return VINF_SUCCESS;
7375
7376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7377 }
7378}
7379
7380
7381/** Opcode 0x0f 0xb2. */
7382FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7383{
7384 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7385 IEMOP_HLP_MIN_386();
7386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7388 return IEMOP_RAISE_INVALID_OPCODE();
7389 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7390}
7391
7392
7393/** Opcode 0x0f 0xb3. */
7394FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7395{
7396 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7397 IEMOP_HLP_MIN_386();
7398 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7399}
7400
7401
7402/** Opcode 0x0f 0xb4. */
7403FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7404{
7405 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7406 IEMOP_HLP_MIN_386();
7407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7408 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7409 return IEMOP_RAISE_INVALID_OPCODE();
7410 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7411}
7412
7413
7414/** Opcode 0x0f 0xb5. */
7415FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7416{
7417 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7418 IEMOP_HLP_MIN_386();
7419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7421 return IEMOP_RAISE_INVALID_OPCODE();
7422 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7423}
7424
7425
7426/** Opcode 0x0f 0xb6. */
7427FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7428{
7429 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7430 IEMOP_HLP_MIN_386();
7431
7432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7433
7434 /*
7435 * If rm is denoting a register, no more instruction bytes.
7436 */
7437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7438 {
7439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7440 switch (pVCpu->iem.s.enmEffOpSize)
7441 {
7442 case IEMMODE_16BIT:
7443 IEM_MC_BEGIN(0, 1);
7444 IEM_MC_LOCAL(uint16_t, u16Value);
7445 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7446 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7447 IEM_MC_ADVANCE_RIP();
7448 IEM_MC_END();
7449 return VINF_SUCCESS;
7450
7451 case IEMMODE_32BIT:
7452 IEM_MC_BEGIN(0, 1);
7453 IEM_MC_LOCAL(uint32_t, u32Value);
7454 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7455 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7456 IEM_MC_ADVANCE_RIP();
7457 IEM_MC_END();
7458 return VINF_SUCCESS;
7459
7460 case IEMMODE_64BIT:
7461 IEM_MC_BEGIN(0, 1);
7462 IEM_MC_LOCAL(uint64_t, u64Value);
7463 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7464 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7465 IEM_MC_ADVANCE_RIP();
7466 IEM_MC_END();
7467 return VINF_SUCCESS;
7468
7469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7470 }
7471 }
7472 else
7473 {
7474 /*
7475 * We're loading a register from memory.
7476 */
7477 switch (pVCpu->iem.s.enmEffOpSize)
7478 {
7479 case IEMMODE_16BIT:
7480 IEM_MC_BEGIN(0, 2);
7481 IEM_MC_LOCAL(uint16_t, u16Value);
7482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7485 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7486 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7487 IEM_MC_ADVANCE_RIP();
7488 IEM_MC_END();
7489 return VINF_SUCCESS;
7490
7491 case IEMMODE_32BIT:
7492 IEM_MC_BEGIN(0, 2);
7493 IEM_MC_LOCAL(uint32_t, u32Value);
7494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7497 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7498 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7499 IEM_MC_ADVANCE_RIP();
7500 IEM_MC_END();
7501 return VINF_SUCCESS;
7502
7503 case IEMMODE_64BIT:
7504 IEM_MC_BEGIN(0, 2);
7505 IEM_MC_LOCAL(uint64_t, u64Value);
7506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7509 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7510 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7511 IEM_MC_ADVANCE_RIP();
7512 IEM_MC_END();
7513 return VINF_SUCCESS;
7514
7515 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7516 }
7517 }
7518}
7519
7520
7521/** Opcode 0x0f 0xb7. */
7522FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7523{
7524 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7525 IEMOP_HLP_MIN_386();
7526
7527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7528
7529 /** @todo Not entirely sure how the operand size prefix is handled here,
7530 * assuming that it will be ignored. Would be nice to have a few
7531 * tests for this. */
7532 /*
7533 * If rm is denoting a register, no more instruction bytes.
7534 */
7535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7536 {
7537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7538 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7539 {
7540 IEM_MC_BEGIN(0, 1);
7541 IEM_MC_LOCAL(uint32_t, u32Value);
7542 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7543 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7544 IEM_MC_ADVANCE_RIP();
7545 IEM_MC_END();
7546 }
7547 else
7548 {
7549 IEM_MC_BEGIN(0, 1);
7550 IEM_MC_LOCAL(uint64_t, u64Value);
7551 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7552 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7553 IEM_MC_ADVANCE_RIP();
7554 IEM_MC_END();
7555 }
7556 }
7557 else
7558 {
7559 /*
7560 * We're loading a register from memory.
7561 */
7562 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7563 {
7564 IEM_MC_BEGIN(0, 2);
7565 IEM_MC_LOCAL(uint32_t, u32Value);
7566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7569 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7570 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7571 IEM_MC_ADVANCE_RIP();
7572 IEM_MC_END();
7573 }
7574 else
7575 {
7576 IEM_MC_BEGIN(0, 2);
7577 IEM_MC_LOCAL(uint64_t, u64Value);
7578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7581 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7582 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7583 IEM_MC_ADVANCE_RIP();
7584 IEM_MC_END();
7585 }
7586 }
7587 return VINF_SUCCESS;
7588}
7589
7590
7591/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7592FNIEMOP_UD_STUB(iemOp_jmpe);
7593/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7594FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7595
7596
7597/**
7598 * @opcode 0xb9
7599 * @opinvalid intel-modrm
7600 * @optest ->
7601 */
7602FNIEMOP_DEF(iemOp_Grp10)
7603{
7604 /*
7605 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7606 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7607 */
7608 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7609 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7610 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7611}
7612
7613
7614/** Opcode 0x0f 0xba. */
7615FNIEMOP_DEF(iemOp_Grp8)
7616{
7617 IEMOP_HLP_MIN_386();
7618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7619 PCIEMOPBINSIZES pImpl;
7620 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7621 {
7622 case 0: case 1: case 2: case 3:
7623 /* Both AMD and Intel want full modr/m decoding and imm8. */
7624 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7625 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7626 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7627 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7628 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7630 }
7631 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
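 /* Note: with an immediate operand the bit offset is masked to the operand
    width (the 0x0f/0x1f/0x3f masking below), so unlike the Ev,Gv forms it
    can never address memory outside the operand itself. */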
7632
7633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7634 {
7635 /* register destination. */
7636 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7638
7639 switch (pVCpu->iem.s.enmEffOpSize)
7640 {
7641 case IEMMODE_16BIT:
7642 IEM_MC_BEGIN(3, 0);
7643 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7644 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7645 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7646
7647 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7648 IEM_MC_REF_EFLAGS(pEFlags);
7649 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7650
7651 IEM_MC_ADVANCE_RIP();
7652 IEM_MC_END();
7653 return VINF_SUCCESS;
7654
7655 case IEMMODE_32BIT:
7656 IEM_MC_BEGIN(3, 0);
7657 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7658 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7659 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7660
7661 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7662 IEM_MC_REF_EFLAGS(pEFlags);
7663 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7664
7665 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7666 IEM_MC_ADVANCE_RIP();
7667 IEM_MC_END();
7668 return VINF_SUCCESS;
7669
7670 case IEMMODE_64BIT:
7671 IEM_MC_BEGIN(3, 0);
7672 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7673 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7674 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7675
7676 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7677 IEM_MC_REF_EFLAGS(pEFlags);
7678 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7679
7680 IEM_MC_ADVANCE_RIP();
7681 IEM_MC_END();
7682 return VINF_SUCCESS;
7683
7684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7685 }
7686 }
7687 else
7688 {
7689 /* memory destination. */
7690
7691 uint32_t fAccess;
7692 if (pImpl->pfnLockedU16)
7693 fAccess = IEM_ACCESS_DATA_RW;
7694 else /* BT */
7695 fAccess = IEM_ACCESS_DATA_R;
7696
7697 /** @todo test negative bit offsets! */
7698 switch (pVCpu->iem.s.enmEffOpSize)
7699 {
7700 case IEMMODE_16BIT:
7701 IEM_MC_BEGIN(3, 1);
7702 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7703 IEM_MC_ARG(uint16_t, u16Src, 1);
7704 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7706
7707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7708 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7709 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7710 if (pImpl->pfnLockedU16)
7711 IEMOP_HLP_DONE_DECODING();
7712 else
7713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7714 IEM_MC_FETCH_EFLAGS(EFlags);
7715 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7716 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7717 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7718 else
7719 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7721
7722 IEM_MC_COMMIT_EFLAGS(EFlags);
7723 IEM_MC_ADVANCE_RIP();
7724 IEM_MC_END();
7725 return VINF_SUCCESS;
7726
7727 case IEMMODE_32BIT:
7728 IEM_MC_BEGIN(3, 1);
7729 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7730 IEM_MC_ARG(uint32_t, u32Src, 1);
7731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7733
7734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7735 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7736 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7737 if (pImpl->pfnLockedU16)
7738 IEMOP_HLP_DONE_DECODING();
7739 else
7740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7741 IEM_MC_FETCH_EFLAGS(EFlags);
7742 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7743 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7745 else
7746 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7748
7749 IEM_MC_COMMIT_EFLAGS(EFlags);
7750 IEM_MC_ADVANCE_RIP();
7751 IEM_MC_END();
7752 return VINF_SUCCESS;
7753
7754 case IEMMODE_64BIT:
7755 IEM_MC_BEGIN(3, 1);
7756 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7757 IEM_MC_ARG(uint64_t, u64Src, 1);
7758 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7760
7761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7762 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7763 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7764 if (pImpl->pfnLockedU16)
7765 IEMOP_HLP_DONE_DECODING();
7766 else
7767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7768 IEM_MC_FETCH_EFLAGS(EFlags);
7769 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7770 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7772 else
7773 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7775
7776 IEM_MC_COMMIT_EFLAGS(EFlags);
7777 IEM_MC_ADVANCE_RIP();
7778 IEM_MC_END();
7779 return VINF_SUCCESS;
7780
7781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7782 }
7783 }
7784}
7785
7786
7787/** Opcode 0x0f 0xbb. */
7788FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7789{
7790 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7791 IEMOP_HLP_MIN_386();
7792 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7793}
7794
7795
7796/** Opcode 0x0f 0xbc. */
7797FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7798{
7799 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7800 IEMOP_HLP_MIN_386();
7801 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7802 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7803}
7804
7805
7806/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7807FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7808
7809
7810/** Opcode 0x0f 0xbd. */
7811FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7812{
7813 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7814 IEMOP_HLP_MIN_386();
7815 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7816 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7817}
7818
7819
7820/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7821FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7822
7823
7824/** Opcode 0x0f 0xbe. */
7825FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7826{
7827 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7828 IEMOP_HLP_MIN_386();
7829
7830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7831
7832 /*
7833 * If rm is denoting a register, no more instruction bytes.
7834 */
7835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7836 {
7837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7838 switch (pVCpu->iem.s.enmEffOpSize)
7839 {
7840 case IEMMODE_16BIT:
7841 IEM_MC_BEGIN(0, 1);
7842 IEM_MC_LOCAL(uint16_t, u16Value);
7843 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7844 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7845 IEM_MC_ADVANCE_RIP();
7846 IEM_MC_END();
7847 return VINF_SUCCESS;
7848
7849 case IEMMODE_32BIT:
7850 IEM_MC_BEGIN(0, 1);
7851 IEM_MC_LOCAL(uint32_t, u32Value);
7852 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7853 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7854 IEM_MC_ADVANCE_RIP();
7855 IEM_MC_END();
7856 return VINF_SUCCESS;
7857
7858 case IEMMODE_64BIT:
7859 IEM_MC_BEGIN(0, 1);
7860 IEM_MC_LOCAL(uint64_t, u64Value);
7861 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7862 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7863 IEM_MC_ADVANCE_RIP();
7864 IEM_MC_END();
7865 return VINF_SUCCESS;
7866
7867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7868 }
7869 }
7870 else
7871 {
7872 /*
7873 * We're loading a register from memory.
7874 */
7875 switch (pVCpu->iem.s.enmEffOpSize)
7876 {
7877 case IEMMODE_16BIT:
7878 IEM_MC_BEGIN(0, 2);
7879 IEM_MC_LOCAL(uint16_t, u16Value);
7880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7883 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7884 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7885 IEM_MC_ADVANCE_RIP();
7886 IEM_MC_END();
7887 return VINF_SUCCESS;
7888
7889 case IEMMODE_32BIT:
7890 IEM_MC_BEGIN(0, 2);
7891 IEM_MC_LOCAL(uint32_t, u32Value);
7892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7895 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7896 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7897 IEM_MC_ADVANCE_RIP();
7898 IEM_MC_END();
7899 return VINF_SUCCESS;
7900
7901 case IEMMODE_64BIT:
7902 IEM_MC_BEGIN(0, 2);
7903 IEM_MC_LOCAL(uint64_t, u64Value);
7904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7907 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7908 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7909 IEM_MC_ADVANCE_RIP();
7910 IEM_MC_END();
7911 return VINF_SUCCESS;
7912
7913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7914 }
7915 }
7916}
7917
7918
7919/** Opcode 0x0f 0xbf. */
7920FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7921{
7922 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7923 IEMOP_HLP_MIN_386();
7924
7925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7926
7927 /** @todo Not entirely sure how the operand size prefix is handled here,
7928 * assuming that it will be ignored. Would be nice to have a few
7929 * tests for this. */
7930 /*
7931 * If rm is denoting a register, no more instruction bytes.
7932 */
7933 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7934 {
7935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7936 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7937 {
7938 IEM_MC_BEGIN(0, 1);
7939 IEM_MC_LOCAL(uint32_t, u32Value);
7940 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7941 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7942 IEM_MC_ADVANCE_RIP();
7943 IEM_MC_END();
7944 }
7945 else
7946 {
7947 IEM_MC_BEGIN(0, 1);
7948 IEM_MC_LOCAL(uint64_t, u64Value);
7949 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7950 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7951 IEM_MC_ADVANCE_RIP();
7952 IEM_MC_END();
7953 }
7954 }
7955 else
7956 {
7957 /*
7958 * We're loading a register from memory.
7959 */
7960 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7961 {
7962 IEM_MC_BEGIN(0, 2);
7963 IEM_MC_LOCAL(uint32_t, u32Value);
7964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7967 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7968 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7969 IEM_MC_ADVANCE_RIP();
7970 IEM_MC_END();
7971 }
7972 else
7973 {
7974 IEM_MC_BEGIN(0, 2);
7975 IEM_MC_LOCAL(uint64_t, u64Value);
7976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7979 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7980 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7981 IEM_MC_ADVANCE_RIP();
7982 IEM_MC_END();
7983 }
7984 }
7985 return VINF_SUCCESS;
7986}
7987
7988
7989/** Opcode 0x0f 0xc0. */
7990FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7991{
7992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7993 IEMOP_HLP_MIN_486();
7994 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
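 /* XADD exchanges the destination with the source register and stores the
    sum of the two original values in the destination; EFLAGS are set as
    for an ADD. */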
7995
7996 /*
7997 * If rm is denoting a register, no more instruction bytes.
7998 */
7999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8000 {
8001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8002
8003 IEM_MC_BEGIN(3, 0);
8004 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8005 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8006 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8007
8008 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8009 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8010 IEM_MC_REF_EFLAGS(pEFlags);
8011 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8012
8013 IEM_MC_ADVANCE_RIP();
8014 IEM_MC_END();
8015 }
8016 else
8017 {
8018 /*
8019 * We're accessing memory.
8020 */
8021 IEM_MC_BEGIN(3, 3);
8022 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8023 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8024 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8025 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8027
8028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8029 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8030 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8031 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8032 IEM_MC_FETCH_EFLAGS(EFlags);
8033 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8034 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8035 else
8036 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8037
8038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8039 IEM_MC_COMMIT_EFLAGS(EFlags);
8040 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8041 IEM_MC_ADVANCE_RIP();
8042 IEM_MC_END();
8043 return VINF_SUCCESS;
8044 }
8045 return VINF_SUCCESS;
8046}
8047
8048
8049/** Opcode 0x0f 0xc1. */
8050FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8051{
8052 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8053 IEMOP_HLP_MIN_486();
8054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8055
8056 /*
8057 * If rm is denoting a register, no more instruction bytes.
8058 */
8059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8060 {
8061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8062
8063 switch (pVCpu->iem.s.enmEffOpSize)
8064 {
8065 case IEMMODE_16BIT:
8066 IEM_MC_BEGIN(3, 0);
8067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8068 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8070
8071 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8072 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8073 IEM_MC_REF_EFLAGS(pEFlags);
8074 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8075
8076 IEM_MC_ADVANCE_RIP();
8077 IEM_MC_END();
8078 return VINF_SUCCESS;
8079
8080 case IEMMODE_32BIT:
8081 IEM_MC_BEGIN(3, 0);
8082 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8083 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8084 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8085
8086 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8087 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8088 IEM_MC_REF_EFLAGS(pEFlags);
8089 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8090
8091 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8092 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8093 IEM_MC_ADVANCE_RIP();
8094 IEM_MC_END();
8095 return VINF_SUCCESS;
8096
8097 case IEMMODE_64BIT:
8098 IEM_MC_BEGIN(3, 0);
8099 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8100 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8101 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8102
8103 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8104 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8105 IEM_MC_REF_EFLAGS(pEFlags);
8106 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8107
8108 IEM_MC_ADVANCE_RIP();
8109 IEM_MC_END();
8110 return VINF_SUCCESS;
8111
8112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8113 }
8114 }
8115 else
8116 {
8117 /*
8118 * We're accessing memory.
8119 */
8120 switch (pVCpu->iem.s.enmEffOpSize)
8121 {
8122 case IEMMODE_16BIT:
8123 IEM_MC_BEGIN(3, 3);
8124 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8125 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8126 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8127 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8129
8130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8131 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8132 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8133 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8134 IEM_MC_FETCH_EFLAGS(EFlags);
8135 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8136 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8137 else
8138 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8139
8140 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8141 IEM_MC_COMMIT_EFLAGS(EFlags);
8142 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8143 IEM_MC_ADVANCE_RIP();
8144 IEM_MC_END();
8145 return VINF_SUCCESS;
8146
8147 case IEMMODE_32BIT:
8148 IEM_MC_BEGIN(3, 3);
8149 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8150 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8151 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8152 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8154
8155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8156 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8157 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8158 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8159 IEM_MC_FETCH_EFLAGS(EFlags);
8160 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8161 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8162 else
8163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8164
8165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8166 IEM_MC_COMMIT_EFLAGS(EFlags);
8167 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8168 IEM_MC_ADVANCE_RIP();
8169 IEM_MC_END();
8170 return VINF_SUCCESS;
8171
8172 case IEMMODE_64BIT:
8173 IEM_MC_BEGIN(3, 3);
8174 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8175 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8176 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8177 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8179
8180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8181 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8182 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8183 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8184 IEM_MC_FETCH_EFLAGS(EFlags);
8185 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8186 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8187 else
8188 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8189
8190 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8191 IEM_MC_COMMIT_EFLAGS(EFlags);
8192 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8193 IEM_MC_ADVANCE_RIP();
8194 IEM_MC_END();
8195 return VINF_SUCCESS;
8196
8197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8198 }
8199 }
8200}
8201
8202
8203/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8204FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8205/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8206FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8207/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8208FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8209/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8210FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8211
8212
8213/** Opcode 0x0f 0xc3. */
8214FNIEMOP_DEF(iemOp_movnti_My_Gy)
8215{
8216 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
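 /* MOVNTI is a store with a non-temporal cache hint; the hint has no
    architectural side effects, so a plain store suffices for emulation. */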
8217
8218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8219
8220 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8221 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8222 {
8223 switch (pVCpu->iem.s.enmEffOpSize)
8224 {
8225 case IEMMODE_32BIT:
8226 IEM_MC_BEGIN(0, 2);
8227 IEM_MC_LOCAL(uint32_t, u32Value);
8228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8229
8230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8232 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8233 return IEMOP_RAISE_INVALID_OPCODE();
8234
8235 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8236 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8237 IEM_MC_ADVANCE_RIP();
8238 IEM_MC_END();
8239 break;
8240
8241 case IEMMODE_64BIT:
8242 IEM_MC_BEGIN(0, 2);
8243 IEM_MC_LOCAL(uint64_t, u64Value);
8244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8245
8246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8249 return IEMOP_RAISE_INVALID_OPCODE();
8250
8251 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8252 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8253 IEM_MC_ADVANCE_RIP();
8254 IEM_MC_END();
8255 break;
8256
8257 case IEMMODE_16BIT:
8258 /** @todo check this form. */
8259 return IEMOP_RAISE_INVALID_OPCODE();
8260 }
8261 }
8262 else
8263 return IEMOP_RAISE_INVALID_OPCODE();
8264 return VINF_SUCCESS;
8265}
8266/* Opcode 0x66 0x0f 0xc3 - invalid */
8267/* Opcode 0xf3 0x0f 0xc3 - invalid */
8268/* Opcode 0xf2 0x0f 0xc3 - invalid */
8269
8270/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8271FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8272/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8273FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8274/* Opcode 0xf3 0x0f 0xc4 - invalid */
8275/* Opcode 0xf2 0x0f 0xc4 - invalid */
8276
8277/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8278FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8279/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8280FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8281/* Opcode 0xf3 0x0f 0xc5 - invalid */
8282/* Opcode 0xf2 0x0f 0xc5 - invalid */
8283
8284/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8285FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8286/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8287FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8288/* Opcode 0xf3 0x0f 0xc6 - invalid */
8289/* Opcode 0xf2 0x0f 0xc6 - invalid */
8290
8291
8292/** Opcode 0x0f 0xc7 !11/1. */
8293FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8294{
8295 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
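 /* CMPXCHG8B compares EDX:EAX with the 64-bit memory operand; if equal,
    ZF is set and ECX:EBX is written to memory, otherwise ZF is cleared
    and the memory operand is loaded into EDX:EAX. */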
8296
8297 IEM_MC_BEGIN(4, 3);
8298 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8299 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8300 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8301 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8302 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8303 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8305
8306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8307 IEMOP_HLP_DONE_DECODING();
8308 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8309
8310 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8311 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8312 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8313
8314 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8315 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8316 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8317
8318 IEM_MC_FETCH_EFLAGS(EFlags);
8319 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8320 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8321 else
8322 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8323
8324 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8325 IEM_MC_COMMIT_EFLAGS(EFlags);
8326 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8327 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8328 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8329 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8330 IEM_MC_ENDIF();
8331 IEM_MC_ADVANCE_RIP();
8332
8333 IEM_MC_END();
8334 return VINF_SUCCESS;
8335}
8336
8337
8338/** Opcode REX.W 0x0f 0xc7 !11/1. */
8339FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8340{
8341 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
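 /* CMPXCHG16B works like CMPXCHG8B but on RDX:RAX and RCX:RBX, with the
    extra requirement that the memory operand be 16-byte aligned (#GP(0)
    otherwise, enforced by the alignment check below). */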
8342 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8343 {
8344#if 0
8345 RT_NOREF(bRm);
8346 IEMOP_BITCH_ABOUT_STUB();
8347 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8348#else
8349 IEM_MC_BEGIN(4, 3);
8350 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8351 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8352 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8353 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8354 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8355 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8357
8358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8359 IEMOP_HLP_DONE_DECODING();
8360 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8361 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8362
8363 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8364 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8365 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8366
8367 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8368 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8369 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8370
8371 IEM_MC_FETCH_EFLAGS(EFlags);
8372# ifdef RT_ARCH_AMD64
8373 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8374 {
8375 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8376 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8377 else
8378 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8379 }
8380 else
8381# endif
8382 {
8383 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8384 accesses and not at all atomic, which works fine in a uni-CPU guest
8385 configuration (ignoring DMA). If guest SMP is active we have no choice
8386 but to use a rendezvous callback here. Sigh. */
8387 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8389 else
8390 {
8391 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8392 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8393 }
8394 }
8395
8396 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8397 IEM_MC_COMMIT_EFLAGS(EFlags);
8398 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8399 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8400 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8401 IEM_MC_ENDIF();
8402 IEM_MC_ADVANCE_RIP();
8403
8404 IEM_MC_END();
8405 return VINF_SUCCESS;
8406#endif
8407 }
8408 Log(("cmpxchg16b -> #UD\n"));
8409 return IEMOP_RAISE_INVALID_OPCODE();
8410}
8411
8412FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8413{
8414 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8415 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8416 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8417}
8418
8419/** Opcode 0x0f 0xc7 11/6. */
8420FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8421
8422/** Opcode 0x0f 0xc7 !11/6. */
8423#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8424FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8425{
8426 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8427 IEMOP_HLP_IN_VMX_OPERATION();
8428 IEMOP_HLP_VMX_INSTR();
8429 IEM_MC_BEGIN(1, 0);
8430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 0);
8431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8432 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
8433 IEM_MC_CALL_CIMPL_1(iemCImpl_vmptrld, GCPtrEffSrc);
8434 IEM_MC_END();
8435 return VINF_SUCCESS;
8436}
8437#else
8438FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8439#endif
8440
8441/** Opcode 0x66 0x0f 0xc7 !11/6. */
8442#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8443FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8444{
8445 IEMOP_MNEMONIC(vmclear, "vmclear");
8446 IEMOP_HLP_IN_VMX_OPERATION();
8447 IEMOP_HLP_VMX_INSTR();
8448 IEM_MC_BEGIN(1, 0);
8449 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
8450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8451 IEMOP_HLP_DONE_DECODING();
8452 IEM_MC_CALL_CIMPL_1(iemCImpl_vmclear, GCPtrEffDst);
8453 IEM_MC_END();
8454 return VINF_SUCCESS;
8455}
8456#else
8457FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8458#endif
8459
8460/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8461#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8462FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8463{
8464 IEMOP_MNEMONIC(vmxon, "vmxon");
8465 IEMOP_HLP_VMX_INSTR();
8466 IEM_MC_BEGIN(1, 0);
8467 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 0);
8468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8469 IEMOP_HLP_DONE_DECODING();
8470 IEM_MC_CALL_CIMPL_1(iemCImpl_vmxon, GCPtrEffSrc);
8471 IEM_MC_END();
8472 return VINF_SUCCESS;
8473}
8474#else
8475FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8476#endif
8477
8478/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8479#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8480FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8481{
8482 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8483 IEMOP_HLP_IN_VMX_OPERATION();
8484 IEMOP_HLP_VMX_INSTR();
8485 IEM_MC_BEGIN(1, 0);
8486 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
8487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8488 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
8489 IEM_MC_CALL_CIMPL_1(iemCImpl_vmptrst, GCPtrEffDst);
8490 IEM_MC_END();
8491 return VINF_SUCCESS;
8492}
8493#else
8494FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8495#endif
8496
8497/** Opcode 0x0f 0xc7 11/7. */
8498FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8499
8500
8501/**
8502 * Group 9 jump table for register variant.
8503 */
8504IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8505{ /* pfx: none, 066h, 0f3h, 0f2h */
8506 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8507 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8508 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8509 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8510 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8511 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8512 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8513 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8514};
8515AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8516
8517
8518/**
8519 * Group 9 jump table for memory variant.
8520 */
8521IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8522{ /* pfx: none, 066h, 0f3h, 0f2h */
8523 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8524 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8525 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8526 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8527 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8528 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8529 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8530 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8531};
8532AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8533
8534
8535/** Opcode 0x0f 0xc7. */
8536FNIEMOP_DEF(iemOp_Grp9)
8537{
8538 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
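 /* Dispatch on the modr/m reg field and the active SIMD prefix; the
    tables above are indexed by reg * 4 + prefix index. */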
8539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8540 /* register, register */
8541 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8542 + pVCpu->iem.s.idxPrefix], bRm);
8543 /* memory, register */
8544 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8545 + pVCpu->iem.s.idxPrefix], bRm);
8546}
8547
8548
8549/**
8550 * Common 'bswap register' helper.
8551 */
8552FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8553{
8554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8555 switch (pVCpu->iem.s.enmEffOpSize)
8556 {
8557 case IEMMODE_16BIT:
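            /* Note: BSWAP with a 16-bit operand is documented as undefined;
               the behaviour provided here is whatever iemAImpl_bswap_u16
               implements. */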
8558 IEM_MC_BEGIN(1, 0);
8559 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8560 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8561 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8562 IEM_MC_ADVANCE_RIP();
8563 IEM_MC_END();
8564 return VINF_SUCCESS;
8565
8566 case IEMMODE_32BIT:
8567 IEM_MC_BEGIN(1, 0);
8568 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8569 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8570 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8571 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8572 IEM_MC_ADVANCE_RIP();
8573 IEM_MC_END();
8574 return VINF_SUCCESS;
8575
8576 case IEMMODE_64BIT:
8577 IEM_MC_BEGIN(1, 0);
8578 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8579 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8580 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8581 IEM_MC_ADVANCE_RIP();
8582 IEM_MC_END();
8583 return VINF_SUCCESS;
8584
8585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8586 }
8587}
8588
8589
8590/** Opcode 0x0f 0xc8. */
8591FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8592{
8593 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8594 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8595 prefix. REX.B appears to be the correct prefix, however. For a parallel
8596 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8597 IEMOP_HLP_MIN_486();
8598 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8599}
8600
8601
8602/** Opcode 0x0f 0xc9. */
8603FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8604{
8605 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8606 IEMOP_HLP_MIN_486();
8607 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8608}
8609
8610
8611/** Opcode 0x0f 0xca. */
8612FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8613{
8614 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8615 IEMOP_HLP_MIN_486();
8616 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8617}
8618
8619
8620/** Opcode 0x0f 0xcb. */
8621FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8622{
8623 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8624 IEMOP_HLP_MIN_486();
8625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8626}
8627
8628
8629/** Opcode 0x0f 0xcc. */
8630FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8631{
8632 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8633 IEMOP_HLP_MIN_486();
8634 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8635}
8636
8637
8638/** Opcode 0x0f 0xcd. */
8639FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8640{
8641 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8642 IEMOP_HLP_MIN_486();
8643 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8644}
8645
8646
8647/** Opcode 0x0f 0xce. */
8648FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8649{
8650 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8651 IEMOP_HLP_MIN_486();
8652 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8653}
8654
8655
8656/** Opcode 0x0f 0xcf. */
8657FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8658{
8659 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8660 IEMOP_HLP_MIN_486();
8661 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8662}
8663
8664
8665/* Opcode 0x0f 0xd0 - invalid */
8666/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8667FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8668/* Opcode 0xf3 0x0f 0xd0 - invalid */
8669/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8670FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8671
8672/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8673FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8674/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8675FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8676/* Opcode 0xf3 0x0f 0xd1 - invalid */
8677/* Opcode 0xf2 0x0f 0xd1 - invalid */
8678
8679/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8680FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8681/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8682FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8683/* Opcode 0xf3 0x0f 0xd2 - invalid */
8684/* Opcode 0xf2 0x0f 0xd2 - invalid */
8685
8686/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8687FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8688/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8689FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8690/* Opcode 0xf3 0x0f 0xd3 - invalid */
8691/* Opcode 0xf2 0x0f 0xd3 - invalid */
8692
8693/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8694FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8695/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8696FNIEMOP_STUB(iemOp_paddq_Vx_W);
8697/* Opcode 0xf3 0x0f 0xd4 - invalid */
8698/* Opcode 0xf2 0x0f 0xd4 - invalid */
8699
8700/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8701FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8702/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8703FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8704/* Opcode 0xf3 0x0f 0xd5 - invalid */
8705/* Opcode 0xf2 0x0f 0xd5 - invalid */
8706
8707/* Opcode 0x0f 0xd6 - invalid */
8708
8709/**
8710 * @opcode 0xd6
8711 * @oppfx 0x66
8712 * @opcpuid sse2
8713 * @opgroup og_sse2_pcksclr_datamove
8714 * @opxcpttype none
8715 * @optest op1=-1 op2=2 -> op1=2
8716 * @optest op1=0 op2=-42 -> op1=-42
8717 */
8718FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8719{
8720 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
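 /* MOVQ xmm2/m64, xmm1 stores the low quadword of the source; in the
    register form the high quadword of the destination is zeroed. */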
8721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8722 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8723 {
8724 /*
8725 * Register, register.
8726 */
8727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8728 IEM_MC_BEGIN(0, 2);
8729 IEM_MC_LOCAL(uint64_t, uSrc);
8730
8731 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8732 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8733
8734 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8735 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8736
8737 IEM_MC_ADVANCE_RIP();
8738 IEM_MC_END();
8739 }
8740 else
8741 {
8742 /*
8743 * Memory, register.
8744 */
8745 IEM_MC_BEGIN(0, 2);
8746 IEM_MC_LOCAL(uint64_t, uSrc);
8747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8748
8749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8753
8754 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8755 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8756
8757 IEM_MC_ADVANCE_RIP();
8758 IEM_MC_END();
8759 }
8760 return VINF_SUCCESS;
8761}
8762
8763
8764/**
8765 * @opcode 0xd6
8766 * @opcodesub 11 mr/reg
8767 * @oppfx f3
8768 * @opcpuid sse2
8769 * @opgroup og_sse2_simdint_datamove
8770 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8771 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8772 */
8773FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8774{
8775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8776 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8777 {
8778 /*
8779 * Register, register.
8780 */
8781 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
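 /* MOVQ2DQ moves the MMX register into the low quadword of the XMM
    register, zeroing the high quadword, and switches the x87 unit to
    MMX mode (FTW becomes 0xff, cf. the @optest lines above). */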
8782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8783 IEM_MC_BEGIN(0, 1);
8784 IEM_MC_LOCAL(uint64_t, uSrc);
8785
8786 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8787 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8788
8789 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8790 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8791 IEM_MC_FPU_TO_MMX_MODE();
8792
8793 IEM_MC_ADVANCE_RIP();
8794 IEM_MC_END();
8795 return VINF_SUCCESS;
8796 }
8797
8798 /**
8799 * @opdone
8800 * @opmnemonic udf30fd6mem
8801 * @opcode 0xd6
8802 * @opcodesub !11 mr/reg
8803 * @oppfx f3
8804 * @opunused intel-modrm
8805 * @opcpuid sse
8806 * @optest ->
8807 */
8808 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8809}
8810
8811
8812/**
8813 * @opcode 0xd6
8814 * @opcodesub 11 mr/reg
8815 * @oppfx f2
8816 * @opcpuid sse2
8817 * @opgroup og_sse2_simdint_datamove
8818 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8819 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8820 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8821 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8822 * @optest op1=-42 op2=0xfedcba9876543210
8823 * -> op1=0xfedcba9876543210 ftw=0xff
8824 */
8825FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8826{
8827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8828 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8829 {
8830 /*
8831 * Register, register.
8832 */
8833 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8835 IEM_MC_BEGIN(0, 1);
8836 IEM_MC_LOCAL(uint64_t, uSrc);
8837
8838 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8839 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8840
8841 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8842 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8843 IEM_MC_FPU_TO_MMX_MODE();
8844
8845 IEM_MC_ADVANCE_RIP();
8846 IEM_MC_END();
8847 return VINF_SUCCESS;
8848 }
8849
8850 /**
8851 * @opdone
8852 * @opmnemonic udf20fd6mem
8853 * @opcode 0xd6
8854 * @opcodesub !11 mr/reg
8855 * @oppfx f2
8856 * @opunused intel-modrm
8857 * @opcpuid sse
8858 * @optest ->
8859 */
8860 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8861}
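
/*
 * Editor's illustration (assumption, not the emulation code above): the net
 * architectural effect of movq2dq.  The MMX qword is zero-extended into the
 * full 128-bit XMM register (the _ZX_U128 part), and both movq2dq and
 * movdq2q leave the FPU in MMX mode with the tag word all-valid, which is
 * what the ftw=0xff checks in the @optest lines verify.
 */
#if 0 /* illustrative sketch only */
static void iemExampleMovq2dq(PRTUINT128U pDst /* XMM */, uint64_t const *pSrc /* MMX */)
{
    pDst->s.Lo = *pSrc; /* low qword <- MMX register */
    pDst->s.Hi = 0;     /* high qword zeroed */
}
#endif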

/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
     *        and the opcode is modified to work on the whole width (not
     *        just 128 bits). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
    /** @todo testcase: Check that the instruction implicitly clears the high
     *        bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
     *        and the opcode is modified to work on the whole width (not
     *        just 128 bits). */
    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd, Ux");
    /* Docs say register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
    {
        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */
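
/*
 * Editor's illustration (hypothetical, not the real iemAImpl_pmovmskb_u128
 * worker): pmovmskb gathers the sign bit of every source byte into the low
 * bits of the destination GPR and clears the remaining bits.
 */
#if 0 /* illustrative sketch only */
static void iemExamplePMovMskBU128(uint64_t *puDst, PCRTUINT128U puSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte; /* MSB of byte i -> bit i */
    *puDst = fMask; /* bits 16 thru 63 end up zero, matching the lazy high-32 note above */
}
#endif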


/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
FNIEMOP_STUB(iemOp_psubusb_Vx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
FNIEMOP_STUB(iemOp_pand_Vx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
FNIEMOP_STUB(iemOp_psraw_Vx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);


/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic ud0fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode 0xe7
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */
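
/*
 * Editor's note (assumption): the IEM_MC_STORE_MEM_U128_ALIGN_SSE store used
 * by movntdq above is what enforces the instruction's 16-byte alignment
 * requirement; a misaligned effective address faults.  Roughly:
 */
#if 0 /* illustrative sketch only */
static bool iemExampleMovntdqAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* otherwise the store raises #GP(0) */
}
#endif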


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, W */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */
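
/*
 * Editor's illustration: both pxor encodings above dispatch through a common
 * "full, full to full" worker.  A hypothetical 64-bit worker (not the real
 * g_iemAImpl_pxor entry) would simply XOR source into destination; the SSE
 * form does the same on all 128 bits.
 */
#if 0 /* illustrative sketch only */
static void iemExamplePXorU64(uint64_t *puDst, uint64_t const *puSrc)
{
    *puDst ^= *puSrc; /* MMX form; SSE form repeats this for both qwords */
}
#endif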

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
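
/*
 * Editor's note (assumption about the dispatch): the table above has four
 * entries per opcode, one per mandatory-prefix column (none, 0x66, 0xf3,
 * 0xf2), hence the 1024-entry assertion.  A lookup therefore has the shape
 * sketched below, with idxPrefix being a hypothetical 0..3 column index.
 */
#if 0 /* illustrative sketch only */
static PFNIEMOP iemExampleLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif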

/** @} */
