/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 95453 2022-06-30 09:43:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

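
/*
 * Quick refresher on the ModR/M decoding used throughout this file: the byte
 * fetched via IEM_OPCODE_GET_NEXT_U8 splits into mod (bits 7:6), reg
 * (bits 5:3) and r/m (bits 2:0).  A mod value of 3 selects the register
 * form, anything else a memory operand.  For example (byte value assumed
 * for illustration):
 *
 *      bRm = 0xd1 = 11 010 001b
 *      (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)  -> register form
 *      (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK        -> reg = 2
 *      bRm & X86_MODRM_RM_MASK                                   -> r/m = 1
 *
 * In long mode the REX.R and REX.B bits (kept pre-shifted in
 * pVCpu->iem.s.uRexReg / uRexB) are OR'ed in to reach registers 8 thru 15.
 */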

/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
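
/*
 * For orientation: concrete opcode handlers dispatch to the worker above with
 * a pointer to the table holding the actual arithmetic implementation.  A
 * rough sketch (handler and table names assumed for illustration, not taken
 * from this file):
 *
 *      FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC(pxor, "pxor Pq,Qq");
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
 *      }
 */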


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
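
/*
 * The SSE2 worker is used the same way as the MMX one; a pcmpeqb-style
 * handler would look roughly like this (names assumed for illustration):
 *
 *      FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb Vx,Wx");
 *          return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
 *      }
 */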


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
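
/*
 * Worked example of the dispatch above (byte values assumed for
 * illustration): for the sequence 0f 00 d8, bRm = 0xd8 = 11 011 000b, so
 * mod = 3 (register form), reg = 3 selects g_apfnGroup6[3] = iemOp_Grp6_ltr,
 * and r/m = 0 picks AX, i.e. "ltr ax".
 */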


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower four bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
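
/*
 * Worked example of the Grp7 dispatch above (byte values assumed for
 * illustration): 0f 01 d9 has mod = 3, so the register path is taken with
 * reg = 3 and r/m = 1, selecting iemOp_Grp7_Amd_vmmcall.  A memory form such
 * as 0f 01 10 (lgdt [eax] with 32-bit addressing) has reg = 2 and goes
 * through g_apfnGroup7Mem[2] = iemOp_Grp7_lgdt instead.
 */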

/** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
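
/*
 * For orientation (standard ISA behavior): LAR loads the access rights of
 * the descriptor selected by the 16-bit source operand and sets ZF on
 * success, while LSL loads the (expanded) segment limit; when the selector
 * cannot be used, ZF is cleared and the destination is left unchanged.
 */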



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
1511 */
1512 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1513
1514 IEM_MC_BEGIN(0, 2);
1515 IEM_MC_LOCAL(uint64_t, uSrc);
1516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1517
1518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1522
1523 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1524 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1525
1526 IEM_MC_ADVANCE_RIP();
1527 IEM_MC_END();
1528 }
1529 return VINF_SUCCESS;
1530}
1531
1532
1533/**
1534 * @opcode 0x12
1535 * @opcodesub !11 mr/reg
1536 * @oppfx 0x66
1537 * @opcpuid sse2
1538 * @opgroup og_sse2_pcksclr_datamove
1539 * @opxcpttype 5
1540 * @optest op1=1 op2=2 -> op1=2
1541 * @optest op1=0 op2=-42 -> op1=-42
1542 */
1543FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1544{
1545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1546 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1547 {
1548 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1549
1550 IEM_MC_BEGIN(0, 2);
1551 IEM_MC_LOCAL(uint64_t, uSrc);
1552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1553
1554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1556 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1557 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1558
1559 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1560 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1561
1562 IEM_MC_ADVANCE_RIP();
1563 IEM_MC_END();
1564 return VINF_SUCCESS;
1565 }
1566
1567 /**
1568 * @opdone
1569 * @opmnemonic ud660f12m3
1570 * @opcode 0x12
1571 * @opcodesub 11 mr/reg
1572 * @oppfx 0x66
1573 * @opunused immediate
1574 * @opcpuid sse
1575 * @optest ->
1576 */
1577 return IEMOP_RAISE_INVALID_OPCODE();
1578}
1579
1580
1581/**
1582 * @opcode 0x12
1583 * @oppfx 0xf3
1584 * @opcpuid sse3
1585 * @opgroup og_sse3_pcksclr_datamove
1586 * @opxcpttype 4
1587 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1588 * op1=0x00000002000000020000000100000001
1589 */
1590FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1591{
1592 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1595 {
1596 /*
1597 * Register, register.
1598 */
1599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1600 IEM_MC_BEGIN(2, 0);
1601 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1602 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1603
1604 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1605 IEM_MC_PREPARE_SSE_USAGE();
1606
1607 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1608 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1609 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1610
1611 IEM_MC_ADVANCE_RIP();
1612 IEM_MC_END();
1613 }
1614 else
1615 {
1616 /*
1617 * Register, memory.
1618 */
1619 IEM_MC_BEGIN(2, 2);
1620 IEM_MC_LOCAL(RTUINT128U, uSrc);
1621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1622 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1623 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1624
1625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1627 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1628 IEM_MC_PREPARE_SSE_USAGE();
1629
1630 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1631 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1632 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1633
1634 IEM_MC_ADVANCE_RIP();
1635 IEM_MC_END();
1636 }
1637 return VINF_SUCCESS;
1638}
1639
1640
1641/**
1642 * @opcode 0x12
1643 * @oppfx 0xf2
1644 * @opcpuid sse3
1645 * @opgroup og_sse3_pcksclr_datamove
1646 * @opxcpttype 5
1647 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1648 * op1=0x22222222111111112222222211111111
1649 */
1650FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1651{
1652 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1654 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1655 {
1656 /*
1657 * Register, register.
1658 */
1659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1660 IEM_MC_BEGIN(2, 0);
1661 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1662 IEM_MC_ARG(uint64_t, uSrc, 1);
1663
1664 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1665 IEM_MC_PREPARE_SSE_USAGE();
1666
1667 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1668 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 else
1675 {
1676 /*
1677 * Register, memory.
1678 */
1679 IEM_MC_BEGIN(2, 2);
1680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1681 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1682 IEM_MC_ARG(uint64_t, uSrc, 1);
1683
1684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1686 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1687 IEM_MC_PREPARE_SSE_USAGE();
1688
1689 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1690 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1691 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1692
1693 IEM_MC_ADVANCE_RIP();
1694 IEM_MC_END();
1695 }
1696 return VINF_SUCCESS;
1697}
1698
1699
1700/**
1701 * @opcode 0x13
1702 * @opcodesub !11 mr/reg
1703 * @oppfx none
1704 * @opcpuid sse
1705 * @opgroup og_sse_simdfp_datamove
1706 * @opxcpttype 5
1707 * @optest op1=1 op2=2 -> op1=2
1708 * @optest op1=0 op2=-42 -> op1=-42
1709 */
1710FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1711{
1712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1713 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1714 {
1715 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1716
1717 IEM_MC_BEGIN(0, 2);
1718 IEM_MC_LOCAL(uint64_t, uSrc);
1719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1720
1721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1723 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1724 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1725
1726 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1727 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1728
1729 IEM_MC_ADVANCE_RIP();
1730 IEM_MC_END();
1731 return VINF_SUCCESS;
1732 }
1733
1734 /**
1735 * @opdone
1736 * @opmnemonic ud0f13m3
1737 * @opcode 0x13
1738 * @opcodesub 11 mr/reg
1739 * @oppfx none
1740 * @opunused immediate
1741 * @opcpuid sse
1742 * @optest ->
1743 */
1744 return IEMOP_RAISE_INVALID_OPCODE();
1745}
1746
1747
1748/**
1749 * @opcode 0x13
1750 * @opcodesub !11 mr/reg
1751 * @oppfx 0x66
1752 * @opcpuid sse2
1753 * @opgroup og_sse2_pcksclr_datamove
1754 * @opxcpttype 5
1755 * @optest op1=1 op2=2 -> op1=2
1756 * @optest op1=0 op2=-42 -> op1=-42
1757 */
1758FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1759{
1760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1761 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1762 {
1763 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1764 IEM_MC_BEGIN(0, 2);
1765 IEM_MC_LOCAL(uint64_t, uSrc);
1766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1767
1768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1770 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1771 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1772
1773 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1774 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1775
1776 IEM_MC_ADVANCE_RIP();
1777 IEM_MC_END();
1778 return VINF_SUCCESS;
1779 }
1780
1781 /**
1782 * @opdone
1783 * @opmnemonic ud660f13m3
1784 * @opcode 0x13
1785 * @opcodesub 11 mr/reg
1786 * @oppfx 0x66
1787 * @opunused immediate
1788 * @opcpuid sse
1789 * @optest ->
1790 */
1791 return IEMOP_RAISE_INVALID_OPCODE();
1792}
1793
1794
1795/**
1796 * @opmnemonic udf30f13
1797 * @opcode 0x13
1798 * @oppfx 0xf3
1799 * @opunused intel-modrm
1800 * @opcpuid sse
1801 * @optest ->
1802 * @opdone
1803 */
1804
1805/**
1806 * @opmnemonic udf20f13
1807 * @opcode 0x13
1808 * @oppfx 0xf2
1809 * @opunused intel-modrm
1810 * @opcpuid sse
1811 * @optest ->
1812 * @opdone
1813 */
1814
1815/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1816FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1817/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1818FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1819
1820/**
1821 * @opdone
1822 * @opmnemonic udf30f14
1823 * @opcode 0x14
1824 * @oppfx 0xf3
1825 * @opunused intel-modrm
1826 * @opcpuid sse
1827 * @optest ->
1828 * @opdone
1829 */
1830
1831/**
1832 * @opmnemonic udf20f14
1833 * @opcode 0x14
1834 * @oppfx 0xf2
1835 * @opunused intel-modrm
1836 * @opcpuid sse
1837 * @optest ->
1838 * @opdone
1839 */
1840
1841/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1842FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1843/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1844FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1845/* Opcode 0xf3 0x0f 0x15 - invalid */
1846/* Opcode 0xf2 0x0f 0x15 - invalid */
1847
1848/**
1849 * @opdone
1850 * @opmnemonic udf30f15
1851 * @opcode 0x15
1852 * @oppfx 0xf3
1853 * @opunused intel-modrm
1854 * @opcpuid sse
1855 * @optest ->
1856 * @opdone
1857 */
1858
1859/**
1860 * @opmnemonic udf20f15
1861 * @opcode 0x15
1862 * @oppfx 0xf2
1863 * @opunused intel-modrm
1864 * @opcpuid sse
1865 * @optest ->
1866 * @opdone
1867 */
1868
1869FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1870{
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1873 {
1874 /**
1875 * @opcode 0x16
1876 * @opcodesub 11 mr/reg
1877 * @oppfx none
1878 * @opcpuid sse
1879 * @opgroup og_sse_simdfp_datamove
1880 * @opxcpttype 5
1881 * @optest op1=1 op2=2 -> op1=2
1882 * @optest op1=0 op2=-42 -> op1=-42
1883 */
1884 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1885
1886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1887 IEM_MC_BEGIN(0, 1);
1888 IEM_MC_LOCAL(uint64_t, uSrc);
1889
1890 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1891 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1892 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1893 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1894
1895 IEM_MC_ADVANCE_RIP();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 /**
1901 * @opdone
1902 * @opcode 0x16
1903 * @opcodesub !11 mr/reg
1904 * @oppfx none
1905 * @opcpuid sse
1906 * @opgroup og_sse_simdfp_datamove
1907 * @opxcpttype 5
1908 * @optest op1=1 op2=2 -> op1=2
1909 * @optest op1=0 op2=-42 -> op1=-42
1910 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1911 */
1912 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1913
1914 IEM_MC_BEGIN(0, 2);
1915 IEM_MC_LOCAL(uint64_t, uSrc);
1916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1917
1918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1920 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1921 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1922
1923 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1924 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1925
1926 IEM_MC_ADVANCE_RIP();
1927 IEM_MC_END();
1928 }
1929 return VINF_SUCCESS;
1930}
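/*
 * Added commentary (not from the original source): the mod field of the
 * ModR/M byte picks between the two forms decoded above, e.g.:
 *      0f 16 c1    movlhps xmm0, xmm1  ; mod=3:  low qword of xmm1 -> high qword of xmm0
 *      0f 16 06    movhps  xmm0, [esi] ; mod!=3: qword at [esi]    -> high qword of xmm0
 */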
1931
1932
1933/**
1934 * @opcode 0x16
1935 * @opcodesub !11 mr/reg
1936 * @oppfx 0x66
1937 * @opcpuid sse2
1938 * @opgroup og_sse2_pcksclr_datamove
1939 * @opxcpttype 5
1940 * @optest op1=1 op2=2 -> op1=2
1941 * @optest op1=0 op2=-42 -> op1=-42
1942 */
1943FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1944{
1945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1946 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1947 {
1948 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1949 IEM_MC_BEGIN(0, 2);
1950 IEM_MC_LOCAL(uint64_t, uSrc);
1951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1952
1953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1955 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1956 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1957
1958 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1959 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1960
1961 IEM_MC_ADVANCE_RIP();
1962 IEM_MC_END();
1963 return VINF_SUCCESS;
1964 }
1965
1966 /**
1967 * @opdone
1968 * @opmnemonic ud660f16m3
1969 * @opcode 0x16
1970 * @opcodesub 11 mr/reg
1971 * @oppfx 0x66
1972 * @opunused immediate
1973 * @opcpuid sse
1974 * @optest ->
1975 */
1976 return IEMOP_RAISE_INVALID_OPCODE();
1977}
1978
1979
1980/**
1981 * @opcode 0x16
1982 * @oppfx 0xf3
1983 * @opcpuid sse3
1984 * @opgroup og_sse3_pcksclr_datamove
1985 * @opxcpttype 4
1986 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1987 * op1=0x00000002000000020000000100000001
1988 */
1989FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1990{
1991 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1994 {
1995 /*
1996 * Register, register.
1997 */
1998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1999 IEM_MC_BEGIN(2, 0);
2000 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2001 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2002
2003 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2004 IEM_MC_PREPARE_SSE_USAGE();
2005
2006 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2007 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2008 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2009
2010 IEM_MC_ADVANCE_RIP();
2011 IEM_MC_END();
2012 }
2013 else
2014 {
2015 /*
2016 * Register, memory.
2017 */
2018 IEM_MC_BEGIN(2, 2);
2019 IEM_MC_LOCAL(RTUINT128U, uSrc);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2022 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2023
2024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2026 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2027 IEM_MC_PREPARE_SSE_USAGE();
2028
2029 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2030 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2031 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2032
2033 IEM_MC_ADVANCE_RIP();
2034 IEM_MC_END();
2035 }
2036 return VINF_SUCCESS;
2037}
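/*
 * Added commentary: movshdup replicates dwords 1 and 3 of the source into
 * dword pairs 0/1 and 2/3 of the destination, which is what the @optest
 * above encodes: source dwords {eeeeeeee, 00000001, dddddddd, 00000002}
 * (low to high) produce {00000001, 00000001, 00000002, 00000002}.
 */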
2038
2039/**
2040 * @opdone
2041 * @opmnemonic udf20f16
2042 * @opcode 0x16
2043 * @oppfx 0xf2
2044 * @opunused intel-modrm
2045 * @opcpuid sse
2046 * @optest ->
2047 * @opdone
2048 */
2049
2050
2051/**
2052 * @opcode 0x17
2053 * @opcodesub !11 mr/reg
2054 * @oppfx none
2055 * @opcpuid sse
2056 * @opgroup og_sse_simdfp_datamove
2057 * @opxcpttype 5
2058 * @optest op1=1 op2=2 -> op1=2
2059 * @optest op1=0 op2=-42 -> op1=-42
2060 */
2061FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2062{
2063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2064 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2065 {
2066 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2067
2068 IEM_MC_BEGIN(0, 2);
2069 IEM_MC_LOCAL(uint64_t, uSrc);
2070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2071
2072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2075 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2076
2077 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2078 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2079
2080 IEM_MC_ADVANCE_RIP();
2081 IEM_MC_END();
2082 return VINF_SUCCESS;
2083 }
2084
2085 /**
2086 * @opdone
2087 * @opmnemonic ud0f17m3
2088 * @opcode 0x17
2089 * @opcodesub 11 mr/reg
2090 * @oppfx none
2091 * @opunused immediate
2092 * @opcpuid sse
2093 * @optest ->
2094 */
2095 return IEMOP_RAISE_INVALID_OPCODE();
2096}
2097
2098
2099/**
2100 * @opcode 0x17
2101 * @opcodesub !11 mr/reg
2102 * @oppfx 0x66
2103 * @opcpuid sse2
2104 * @opgroup og_sse2_pcksclr_datamove
2105 * @opxcpttype 5
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-42 -> op1=-42
2108 */
2109FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2110{
2111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2112 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2113 {
2114 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2115
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint64_t, uSrc);
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2122 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2124
2125 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2126 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2127
2128 IEM_MC_ADVANCE_RIP();
2129 IEM_MC_END();
2130 return VINF_SUCCESS;
2131 }
2132
2133 /**
2134 * @opdone
2135 * @opmnemonic ud660f17m3
2136 * @opcode 0x17
2137 * @opcodesub 11 mr/reg
2138 * @oppfx 0x66
2139 * @opunused immediate
2140 * @opcpuid sse
2141 * @optest ->
2142 */
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
2145
2146
2147/**
2148 * @opdone
2149 * @opmnemonic udf30f17
2150 * @opcode 0x17
2151 * @oppfx 0xf3
2152 * @opunused intel-modrm
2153 * @opcpuid sse
2154 * @optest ->
2155 * @opdone
2156 */
2157
2158/**
2159 * @opmnemonic udf20f17
2160 * @opcode 0x17
2161 * @oppfx 0xf2
2162 * @opunused intel-modrm
2163 * @opcpuid sse
2164 * @optest ->
2165 * @opdone
2166 */
2167
2168
2169/** Opcode 0x0f 0x18. */
2170FNIEMOP_DEF(iemOp_prefetch_Grp16)
2171{
2172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2173 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2174 {
2175 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2176 {
2177 case 4: /* Aliased to /0 for the time being according to AMD. */
2178 case 5: /* Aliased to /0 for the time being according to AMD. */
2179 case 6: /* Aliased to /0 for the time being according to AMD. */
2180 case 7: /* Aliased to /0 for the time being according to AMD. */
2181 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2182 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2183 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2184 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2186 }
2187
2188 IEM_MC_BEGIN(0, 1);
2189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2192 /* Currently a NOP. */
2193 NOREF(GCPtrEffSrc);
2194 IEM_MC_ADVANCE_RIP();
2195 IEM_MC_END();
2196 return VINF_SUCCESS;
2197 }
2198
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200}
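/*
 * Added commentary: the reg field of the ModR/M byte selects the prefetch
 * hint: 0f 18 /0 = prefetchnta, /1 = prefetcht0, /2 = prefetcht1 and
 * /3 = prefetcht2, with /4../7 aliased to /0 as per the switch above.
 * Since the interpreter has no cache model, all hints are executed as
 * NOPs once the effective address has been decoded.
 */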
2201
2202
2203/** Opcode 0x0f 0x19..0x1f. */
2204FNIEMOP_DEF(iemOp_nop_Ev)
2205{
2206 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2209 {
2210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2211 IEM_MC_BEGIN(0, 0);
2212 IEM_MC_ADVANCE_RIP();
2213 IEM_MC_END();
2214 }
2215 else
2216 {
2217 IEM_MC_BEGIN(0, 1);
2218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2221 /* Currently a NOP. */
2222 NOREF(GCPtrEffSrc);
2223 IEM_MC_ADVANCE_RIP();
2224 IEM_MC_END();
2225 }
2226 return VINF_SUCCESS;
2227}
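/*
 * Added commentary: 0f 0x19..0x1f all land here and behave as NOPs with a
 * ModR/M operand; the effective address is decoded only to consume the
 * right number of opcode bytes and is never accessed (NOREF above).
 * 0f 1f /0 is the multi-byte NOP form recommended by Intel and AMD,
 * e.g. 66 0f 1f 44 00 00 for a six-byte NOP.
 */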
2228
2229
2230/** Opcode 0x0f 0x20. */
2231FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2232{
2233 /* mod is ignored, as are operand size overrides. */
2234 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2235 IEMOP_HLP_MIN_386();
2236 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2237 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2238 else
2239 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2240
2241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2242 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2243 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2244 {
2245 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2246 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2247 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2248 iCrReg |= 8;
2249 }
2250 switch (iCrReg)
2251 {
2252 case 0: case 2: case 3: case 4: case 8:
2253 break;
2254 default:
2255 return IEMOP_RAISE_INVALID_OPCODE();
2256 }
2257 IEMOP_HLP_DONE_DECODING();
2258
2259 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2260}
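/*
 * Added commentary: on CPUs reporting the fMovCr8In32Bit feature (AMD's
 * alternative CR8 encoding for 32-bit mode), a LOCK prefix adds 8 to the
 * control register index, so f0 0f 20 c0 decodes as mov eax, cr8 instead
 * of mov eax, cr0; that is what the iCrReg |= 8 adjustment implements.
 */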
2261
2262
2263/** Opcode 0x0f 0x21. */
2264FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2265{
2266 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2267 IEMOP_HLP_MIN_386();
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2270 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2271 return IEMOP_RAISE_INVALID_OPCODE();
2272 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2273 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2274 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2275}
2276
2277
2278/** Opcode 0x0f 0x22. */
2279FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2280{
2281 /* mod is ignored, as are operand size overrides. */
2282 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2283 IEMOP_HLP_MIN_386();
2284 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2285 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2286 else
2287 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2288
2289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2290 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2291 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2292 {
2293 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2294 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2295 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2296 iCrReg |= 8;
2297 }
2298 switch (iCrReg)
2299 {
2300 case 0: case 2: case 3: case 4: case 8:
2301 break;
2302 default:
2303 return IEMOP_RAISE_INVALID_OPCODE();
2304 }
2305 IEMOP_HLP_DONE_DECODING();
2306
2307 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2308}
2309
2310
2311/** Opcode 0x0f 0x23. */
2312FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2313{
2314 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2315 IEMOP_HLP_MIN_386();
2316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2318 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2319 return IEMOP_RAISE_INVALID_OPCODE();
2320 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2321 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2322 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2323}
2324
2325
2326/** Opcode 0x0f 0x24. */
2327FNIEMOP_DEF(iemOp_mov_Rd_Td)
2328{
2329 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2330 IEMOP_HLP_MIN_386();
2331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2333 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2334 return IEMOP_RAISE_INVALID_OPCODE();
2335 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
2336 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2337 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2338}
2339
2340
2341/** Opcode 0x0f 0x26. */
2342FNIEMOP_DEF(iemOp_mov_Td_Rd)
2343{
2344 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2345 IEMOP_HLP_MIN_386();
2346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2348 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
2349 return IEMOP_RAISE_INVALID_OPCODE();
2350 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
2351 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2352 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2353}
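/*
 * Added commentary: the test registers (TR6/TR7 on the 386, TR3..TR7 on
 * the 486) were dropped with the Pentium, so the IEMTARGETCPU_PENTIUM
 * checks above make both 0f 24 and 0f 26 raise #UD on newer target CPUs.
 */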
2354
2355
2356/**
2357 * @opcode 0x28
2358 * @oppfx none
2359 * @opcpuid sse
2360 * @opgroup og_sse_simdfp_datamove
2361 * @opxcpttype 1
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
2365FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2366{
2367 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2369 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2370 {
2371 /*
2372 * Register, register.
2373 */
2374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2375 IEM_MC_BEGIN(0, 0);
2376 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2378 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2379 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2380 IEM_MC_ADVANCE_RIP();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * Register, memory.
2387 */
2388 IEM_MC_BEGIN(0, 2);
2389 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2391
2392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2396
2397 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2398 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2399
2400 IEM_MC_ADVANCE_RIP();
2401 IEM_MC_END();
2402 }
2403 return VINF_SUCCESS;
2404}
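/*
 * Added commentary: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte
 * alignment movaps requires for memory operands; a misaligned address
 * takes the exception path instead of performing the load.
 */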
2405
2406/**
2407 * @opcode 0x28
2408 * @oppfx 0x66
2409 * @opcpuid sse2
2410 * @opgroup og_sse2_pcksclr_datamove
2411 * @opxcpttype 1
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-42 -> op1=-42
2414 */
2415FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2416{
2417 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2420 {
2421 /*
2422 * Register, register.
2423 */
2424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2425 IEM_MC_BEGIN(0, 0);
2426 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2428 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2429 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2430 IEM_MC_ADVANCE_RIP();
2431 IEM_MC_END();
2432 }
2433 else
2434 {
2435 /*
2436 * Register, memory.
2437 */
2438 IEM_MC_BEGIN(0, 2);
2439 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2441
2442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2444 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2446
2447 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2448 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2449
2450 IEM_MC_ADVANCE_RIP();
2451 IEM_MC_END();
2452 }
2453 return VINF_SUCCESS;
2454}
2455
2456/* Opcode 0xf3 0x0f 0x28 - invalid */
2457/* Opcode 0xf2 0x0f 0x28 - invalid */
2458
2459/**
2460 * @opcode 0x29
2461 * @oppfx none
2462 * @opcpuid sse
2463 * @opgroup og_sse_simdfp_datamove
2464 * @opxcpttype 1
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2469{
2470 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2473 {
2474 /*
2475 * Register, register.
2476 */
2477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2478 IEM_MC_BEGIN(0, 0);
2479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2481 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2482 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2483 IEM_MC_ADVANCE_RIP();
2484 IEM_MC_END();
2485 }
2486 else
2487 {
2488 /*
2489 * Memory, register.
2490 */
2491 IEM_MC_BEGIN(0, 2);
2492 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2494
2495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2499
2500 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2501 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2502
2503 IEM_MC_ADVANCE_RIP();
2504 IEM_MC_END();
2505 }
2506 return VINF_SUCCESS;
2507}
2508
2509/**
2510 * @opcode 0x29
2511 * @oppfx 0x66
2512 * @opcpuid sse2
2513 * @opgroup og_sse2_pcksclr_datamove
2514 * @opxcpttype 1
2515 * @optest op1=1 op2=2 -> op1=2
2516 * @optest op1=0 op2=-42 -> op1=-42
2517 */
2518FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2519{
2520 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2523 {
2524 /*
2525 * Register, register.
2526 */
2527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2528 IEM_MC_BEGIN(0, 0);
2529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2531 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2532 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2533 IEM_MC_ADVANCE_RIP();
2534 IEM_MC_END();
2535 }
2536 else
2537 {
2538 /*
2539 * Memory, register.
2540 */
2541 IEM_MC_BEGIN(0, 2);
2542 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2544
2545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2547 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2549
2550 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2551 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2552
2553 IEM_MC_ADVANCE_RIP();
2554 IEM_MC_END();
2555 }
2556 return VINF_SUCCESS;
2557}
2558
2559/* Opcode 0xf3 0x0f 0x29 - invalid */
2560/* Opcode 0xf2 0x0f 0x29 - invalid */
2561
2562
2563/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2564FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2565/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2566FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2567/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2568FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2569/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2570FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2571
2572
2573/**
2574 * @opcode 0x2b
2575 * @opcodesub !11 mr/reg
2576 * @oppfx none
2577 * @opcpuid sse
2578 * @opgroup og_sse1_cachect
2579 * @opxcpttype 1
2580 * @optest op1=1 op2=2 -> op1=2
2581 * @optest op1=0 op2=-42 -> op1=-42
2582 */
2583FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2584{
2585 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2587 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2588 {
2589 /*
2590 * memory, register.
2591 */
2592 IEM_MC_BEGIN(0, 2);
2593 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2595
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2600
2601 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2602 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2603
2604 IEM_MC_ADVANCE_RIP();
2605 IEM_MC_END();
2606 }
2607 /* The register, register encoding is invalid. */
2608 else
2609 return IEMOP_RAISE_INVALID_OPCODE();
2610 return VINF_SUCCESS;
2611}
2612
2613/**
2614 * @opcode 0x2b
2615 * @opcodesub !11 mr/reg
2616 * @oppfx 0x66
2617 * @opcpuid sse2
2618 * @opgroup og_sse2_cachect
2619 * @opxcpttype 1
2620 * @optest op1=1 op2=2 -> op1=2
2621 * @optest op1=0 op2=-42 -> op1=-42
2622 */
2623FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2624{
2625 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2627 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2628 {
2629 /*
2630 * memory, register.
2631 */
2632 IEM_MC_BEGIN(0, 2);
2633 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2635
2636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2640
2641 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2642 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2643
2644 IEM_MC_ADVANCE_RIP();
2645 IEM_MC_END();
2646 }
2647 /* The register, register encoding is invalid. */
2648 else
2649 return IEMOP_RAISE_INVALID_OPCODE();
2650 return VINF_SUCCESS;
2651}
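/*
 * Added commentary: movntps/movntpd are non-temporal stores; as this
 * emulation has no cache hierarchy to bypass, they are carried out as
 * ordinary aligned 128-bit stores. The register forms are undefined,
 * hence the invalid-opcode path above.
 */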
2652/* Opcode 0xf3 0x0f 0x2b - invalid */
2653/* Opcode 0xf2 0x0f 0x2b - invalid */
2654
2655
2656/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2657FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2658/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2659FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2660/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2661FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2662/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2663FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2664
2665/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2666FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2667/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2668FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2669/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2670FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2671/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2672FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2673
2674/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2675FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2676/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2677FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2678/* Opcode 0xf3 0x0f 0x2e - invalid */
2679/* Opcode 0xf2 0x0f 0x2e - invalid */
2680
2681/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2682FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2683/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2684FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2685/* Opcode 0xf3 0x0f 0x2f - invalid */
2686/* Opcode 0xf2 0x0f 0x2f - invalid */
2687
2688/** Opcode 0x0f 0x30. */
2689FNIEMOP_DEF(iemOp_wrmsr)
2690{
2691 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2693 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2694}
2695
2696
2697/** Opcode 0x0f 0x31. */
2698FNIEMOP_DEF(iemOp_rdtsc)
2699{
2700 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2703}
2704
2705
2706/** Opcode 0x0f 0x32. */
2707FNIEMOP_DEF(iemOp_rdmsr)
2708{
2709 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2711 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2712}
2713
2714
2715/** Opcode 0x0f 0x33. */
2716FNIEMOP_DEF(iemOp_rdpmc)
2717{
2718 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2720 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2721}
2722
2723
2724/** Opcode 0x0f 0x34. */
2725FNIEMOP_DEF(iemOp_sysenter)
2726{
2727 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
2730}
2731
2732/** Opcode 0x0f 0x35. */
2733FNIEMOP_DEF(iemOp_sysexit)
2734{
2735 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
2736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2737 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
2738}
2739
2740/** Opcode 0x0f 0x37. */
2741FNIEMOP_STUB(iemOp_getsec);
2742
2743
2744/** Opcode 0x0f 0x38. */
2745FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2746{
2747#ifdef IEM_WITH_THREE_0F_38
2748 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2749 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2750#else
2751 IEMOP_BITCH_ABOUT_STUB();
2752 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2753#endif
2754}
2755
2756
2757/** Opcode 0x0f 0x3a. */
2758FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2759{
2760#ifdef IEM_WITH_THREE_0F_3A
2761 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2762 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2763#else
2764 IEMOP_BITCH_ABOUT_STUB();
2765 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2766#endif
2767}
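/*
 * Added commentary: the b * 4 + idxPrefix indexing above assumes the
 * three-byte tables hold four entries per opcode byte, one per SIMD
 * prefix (none, 0x66, 0xf3, 0xf2), with idxPrefix recording the last
 * such prefix seen while decoding.
 */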
2768
2769
2770/**
2771 * Implements a conditional move.
2772 *
2773 * Wish there was an obvious way to do this where we could share and reduce
2774 * code bloat.
2775 *
2776 * @param a_Cnd The conditional "microcode" operation.
2777 */
2778#define CMOV_X(a_Cnd) \
2779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2781 { \
2782 switch (pVCpu->iem.s.enmEffOpSize) \
2783 { \
2784 case IEMMODE_16BIT: \
2785 IEM_MC_BEGIN(0, 1); \
2786 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2787 a_Cnd { \
2788 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2789 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2790 } IEM_MC_ENDIF(); \
2791 IEM_MC_ADVANCE_RIP(); \
2792 IEM_MC_END(); \
2793 return VINF_SUCCESS; \
2794 \
2795 case IEMMODE_32BIT: \
2796 IEM_MC_BEGIN(0, 1); \
2797 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2798 a_Cnd { \
2799 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2800 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2801 } IEM_MC_ELSE() { \
2802 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2803 } IEM_MC_ENDIF(); \
2804 IEM_MC_ADVANCE_RIP(); \
2805 IEM_MC_END(); \
2806 return VINF_SUCCESS; \
2807 \
2808 case IEMMODE_64BIT: \
2809 IEM_MC_BEGIN(0, 1); \
2810 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2811 a_Cnd { \
2812 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2813 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2814 } IEM_MC_ENDIF(); \
2815 IEM_MC_ADVANCE_RIP(); \
2816 IEM_MC_END(); \
2817 return VINF_SUCCESS; \
2818 \
2819 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2820 } \
2821 } \
2822 else \
2823 { \
2824 switch (pVCpu->iem.s.enmEffOpSize) \
2825 { \
2826 case IEMMODE_16BIT: \
2827 IEM_MC_BEGIN(0, 2); \
2828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2829 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2831 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2832 a_Cnd { \
2833 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2834 } IEM_MC_ENDIF(); \
2835 IEM_MC_ADVANCE_RIP(); \
2836 IEM_MC_END(); \
2837 return VINF_SUCCESS; \
2838 \
2839 case IEMMODE_32BIT: \
2840 IEM_MC_BEGIN(0, 2); \
2841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2842 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2844 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2845 a_Cnd { \
2846 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2847 } IEM_MC_ELSE() { \
2848 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2849 } IEM_MC_ENDIF(); \
2850 IEM_MC_ADVANCE_RIP(); \
2851 IEM_MC_END(); \
2852 return VINF_SUCCESS; \
2853 \
2854 case IEMMODE_64BIT: \
2855 IEM_MC_BEGIN(0, 2); \
2856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2857 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2859 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2860 a_Cnd { \
2861 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2862 } IEM_MC_ENDIF(); \
2863 IEM_MC_ADVANCE_RIP(); \
2864 IEM_MC_END(); \
2865 return VINF_SUCCESS; \
2866 \
2867 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2868 } \
2869 } do {} while (0)
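/*
 * Added commentary: each cmovCC decoder below instantiates CMOV_X with
 * the EFLAGS predicate of its condition code, matching the Jcc table,
 * for instance:
 *      cmovbe: CF=1 or ZF=1     -> IEM_MC_IF_EFL_ANY_BITS_SET(CF | ZF)
 *      cmovl:  SF != OF         -> IEM_MC_IF_EFL_BITS_NE(SF, OF)
 *      cmovle: ZF=1 or SF != OF -> IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(ZF, SF, OF)
 */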
2870
2871
2872
2873/** Opcode 0x0f 0x40. */
2874FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2875{
2876 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2877 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2878}
2879
2880
2881/** Opcode 0x0f 0x41. */
2882FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2883{
2884 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2885 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2886}
2887
2888
2889/** Opcode 0x0f 0x42. */
2890FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2891{
2892 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2893 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2894}
2895
2896
2897/** Opcode 0x0f 0x43. */
2898FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2899{
2900 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2901 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2902}
2903
2904
2905/** Opcode 0x0f 0x44. */
2906FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2907{
2908 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2909 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2910}
2911
2912
2913/** Opcode 0x0f 0x45. */
2914FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2915{
2916 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2917 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2918}
2919
2920
2921/** Opcode 0x0f 0x46. */
2922FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2923{
2924 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2925 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2926}
2927
2928
2929/** Opcode 0x0f 0x47. */
2930FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2931{
2932 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2933 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2934}
2935
2936
2937/** Opcode 0x0f 0x48. */
2938FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2939{
2940 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2941 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2942}
2943
2944
2945/** Opcode 0x0f 0x49. */
2946FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2947{
2948 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2949 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2950}
2951
2952
2953/** Opcode 0x0f 0x4a. */
2954FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2955{
2956 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2957 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2958}
2959
2960
2961/** Opcode 0x0f 0x4b. */
2962FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2963{
2964 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2965 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2966}
2967
2968
2969/** Opcode 0x0f 0x4c. */
2970FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2971{
2972 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2973 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2974}
2975
2976
2977/** Opcode 0x0f 0x4d. */
2978FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2979{
2980 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2981 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2982}
2983
2984
2985/** Opcode 0x0f 0x4e. */
2986FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2987{
2988 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2989 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2990}
2991
2992
2993/** Opcode 0x0f 0x4f. */
2994FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2995{
2996 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2997 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2998}
2999
3000#undef CMOV_X
3001
3002/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3003FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3004/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3005FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3006/* Opcode 0xf3 0x0f 0x50 - invalid */
3007/* Opcode 0xf2 0x0f 0x50 - invalid */
3008
3009/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3010FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3011/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3012FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3013/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3014FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3015/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3016FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3017
3018/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3019FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3020/* Opcode 0x66 0x0f 0x52 - invalid */
3021/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3022FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3023/* Opcode 0xf2 0x0f 0x52 - invalid */
3024
3025/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3026FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3027/* Opcode 0x66 0x0f 0x53 - invalid */
3028/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3029FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3030/* Opcode 0xf2 0x0f 0x53 - invalid */
3031
3032
3033/** Opcode 0x0f 0x54 - andps Vps, Wps */
3034FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3035{
3036 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3037 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
3038}
3039
3040
3041/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3042FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3043{
3044 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3045 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
3046}
3047
3048
3049/* Opcode 0xf3 0x0f 0x54 - invalid */
3050/* Opcode 0xf2 0x0f 0x54 - invalid */
3051
3052
3053/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3054FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3055{
3056 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3057 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
3058}
3059
3060
3061/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3062FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3063{
3064 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3065 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
3066}
3067
3068
3069/* Opcode 0xf3 0x0f 0x55 - invalid */
3070/* Opcode 0xf2 0x0f 0x55 - invalid */
3071
3072
3073/** Opcode 0x0f 0x56 - orps Vps, Wps */
3074FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3075{
3076 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3077 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
3078}
3079
3080
3081/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3082FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3083{
3084 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3085 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
3086}
3087
3088
3089/* Opcode 0xf3 0x0f 0x56 - invalid */
3090/* Opcode 0xf2 0x0f 0x56 - invalid */
3091
3092
3093/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3094FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3095{
3096 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3097 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
3098}
3099
3100
3101/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3102FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3103{
3104 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3105 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
3106}
3107
3108
3109/* Opcode 0xf3 0x0f 0x57 - invalid */
3110/* Opcode 0xf2 0x0f 0x57 - invalid */
3111
3112/** Opcode 0x0f 0x58 - addps Vps, Wps */
3113FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3114/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3115FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3116/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3117FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3118/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3119FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3120
3121/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3122FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3123/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3124FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3125/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3126FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3127/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3128FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3129
3130/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3131FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3132/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3133FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3134/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3135FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3136/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3137FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3138
3139/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3140FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3141/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3142FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3143/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3144FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3145/* Opcode 0xf2 0x0f 0x5b - invalid */
3146
3147/** Opcode 0x0f 0x5c - subps Vps, Wps */
3148FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3149/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3150FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3151/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3152FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3153/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3154FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3155
3156/** Opcode 0x0f 0x5d - minps Vps, Wps */
3157FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3158/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3159FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3160/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3161FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3162/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3163FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3164
3165/** Opcode 0x0f 0x5e - divps Vps, Wps */
3166FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3167/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3168FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3169/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3170FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3171/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3172FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3173
3174/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3175FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3176/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3177FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3178/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3179FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3180/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3181FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3182
3183/**
3184 * Common worker for MMX instructions on the forms:
3185 * pxxxx mm1, mm2/mem32
3186 *
3187 * The 2nd operand is the first half of a register, which in the memory case
3188 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3189 * where it may read the full 128 bits or only the lower 64 bits.
3190 *
3191 * Exceptions type 4.
3192 */
3193FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3194{
3195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3196 if (!pImpl->pfnU64)
3197 return IEMOP_RAISE_INVALID_OPCODE();
3198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3199 {
3200 /*
3201 * Register, register.
3202 */
3203 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3204 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3206 IEM_MC_BEGIN(2, 0);
3207 IEM_MC_ARG(uint64_t *, pDst, 0);
3208 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3209 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3210 IEM_MC_PREPARE_FPU_USAGE();
3211 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3212 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3213 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3214 IEM_MC_ADVANCE_RIP();
3215 IEM_MC_END();
3216 }
3217 else
3218 {
3219 /*
3220 * Register, memory.
3221 */
3222 IEM_MC_BEGIN(2, 2);
3223 IEM_MC_ARG(uint64_t *, pDst, 0);
3224 IEM_MC_LOCAL(uint32_t, uSrc);
3225 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3231 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3232
3233 IEM_MC_PREPARE_FPU_USAGE();
3234 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3235 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3236
3237 IEM_MC_ADVANCE_RIP();
3238 IEM_MC_END();
3239 }
3240 return VINF_SUCCESS;
3241}
3242
3243
3244/**
3245 * Common worker for SSE2 instructions on the forms:
3246 * pxxxx xmm1, xmm2/mem128
3247 *
3248 * The 2nd operand is the first half of a register, which in the memory case
3249 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
3250 * where it may read the full 128 bits or only the lower 64 bits.
3251 *
3252 * Exceptions type 4.
3253 */
3254FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3255{
3256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3258 {
3259 /*
3260 * Register, register.
3261 */
3262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3263 IEM_MC_BEGIN(2, 0);
3264 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3265 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3266 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3267 IEM_MC_PREPARE_SSE_USAGE();
3268 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3269 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3270 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3271 IEM_MC_ADVANCE_RIP();
3272 IEM_MC_END();
3273 }
3274 else
3275 {
3276 /*
3277 * Register, memory.
3278 */
3279 IEM_MC_BEGIN(2, 2);
3280 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3281 IEM_MC_LOCAL(uint64_t, uSrc);
3282 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3284
3285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3287 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3288 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3289
3290 IEM_MC_PREPARE_SSE_USAGE();
3291 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3292 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3293
3294 IEM_MC_ADVANCE_RIP();
3295 IEM_MC_END();
3296 }
3297 return VINF_SUCCESS;
3298}
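/*
 * Worked example (added commentary): the low-half unpacks interleave the
 * low elements of destination and source; e.g. with mm1=0x0706050403020100
 * and mm2=0x0f0e0d0c0b0a0908, punpcklbw mm1, mm2 yields 0x0b030a0209010800
 * (d0,s0,d1,s1,d2,s2,d3,s3 from low byte to high).
 */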
3299
3300
3301/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3302FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3303{
3304 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3305 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3306}
3307
3308/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3309FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3310{
3311 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3312 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3313}
3314
3315/* Opcode 0xf3 0x0f 0x60 - invalid */
3316
3317
3318/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3319FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3320{
3321 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3322 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3323}
3324
3325/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3326FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3327{
3328 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3329 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3330}
3331
3332/* Opcode 0xf3 0x0f 0x61 - invalid */
3333
3334
3335/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3336FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3337{
3338 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3339 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3340}
3341
3342/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3343FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3344{
3345 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3346 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3347}
3348
3349/* Opcode 0xf3 0x0f 0x62 - invalid */
3350
3351
3352
3353/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3354FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3355/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3356FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3357/* Opcode 0xf3 0x0f 0x63 - invalid */
3358
3359
3360/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3361FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
3362{
3363 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3364 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpgtb);
3365}
3366
3367
3368/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3369FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
3370{
3371 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3372 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpgtb);
3373}
3374
3375
3376/* Opcode 0xf3 0x0f 0x64 - invalid */
3377
3378
3379/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3380FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
3381{
3382 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3383 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpgtw);
3384}
3385
3386
3387/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3388FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
3389{
3390 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3391 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpgtw);
3392}
3393
3394
3395/* Opcode 0xf3 0x0f 0x65 - invalid */
3396
3397
3398/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3399FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
3400{
3401 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3402 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpgtd);
3403}
3404
3405
3406/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3407FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
3408{
3409 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3410 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpgtd);
3411}
3412
3413
3414/* Opcode 0xf3 0x0f 0x66 - invalid */
3415
3416/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3417FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3418/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3419FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3420/* Opcode 0xf3 0x0f 0x67 - invalid */
3421
3422
3423/**
3424 * Common worker for MMX instructions on the form:
3425 * pxxxx mm1, mm2/mem64
3426 *
3427 * The 2nd operand is the second half of a register, which in the memory case
3428 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3429 * where it may read the full 128 bits or only the upper 64 bits.
3430 *
3431 * Exceptions type 4.
3432 */
3433FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3434{
3435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3436 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3438 {
3439 /*
3440 * Register, register.
3441 */
3442 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3443 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3445 IEM_MC_BEGIN(2, 0);
3446 IEM_MC_ARG(uint64_t *, pDst, 0);
3447 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3448 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3449 IEM_MC_PREPARE_FPU_USAGE();
3450 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3451 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3452 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3453 IEM_MC_ADVANCE_RIP();
3454 IEM_MC_END();
3455 }
3456 else
3457 {
3458 /*
3459 * Register, memory.
3460 */
3461 IEM_MC_BEGIN(2, 2);
3462 IEM_MC_ARG(uint64_t *, pDst, 0);
3463 IEM_MC_LOCAL(uint64_t, uSrc);
3464 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3466
3467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3469 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3470 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3471
3472 IEM_MC_PREPARE_FPU_USAGE();
3473 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3474 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3475
3476 IEM_MC_ADVANCE_RIP();
3477 IEM_MC_END();
3478 }
3479 return VINF_SUCCESS;
3480}
3481
3482
3483/**
3484 * Common worker for SSE2 instructions on the form:
3485 * pxxxx xmm1, xmm2/mem128
3486 *
3487 * The 2nd operand is the second half of a register, which in the memory case
3488 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3489 * where it may read the full 128 bits or only the upper 64 bits.
3490 *
3491 * Exceptions type 4.
3492 */
3493FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3494{
3495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3496 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3497 {
3498 /*
3499 * Register, register.
3500 */
3501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3502 IEM_MC_BEGIN(2, 0);
3503 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3504 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3506 IEM_MC_PREPARE_SSE_USAGE();
3507 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3508 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3509 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3510 IEM_MC_ADVANCE_RIP();
3511 IEM_MC_END();
3512 }
3513 else
3514 {
3515 /*
3516 * Register, memory.
3517 */
3518 IEM_MC_BEGIN(2, 2);
3519 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3520 IEM_MC_LOCAL(RTUINT128U, uSrc);
3521 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3523
3524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3527 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3528
3529 IEM_MC_PREPARE_SSE_USAGE();
3530 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3531 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3532
3533 IEM_MC_ADVANCE_RIP();
3534 IEM_MC_END();
3535 }
3536 return VINF_SUCCESS;
3537}
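/*
 * Worked example (added commentary): the high-half unpacks interleave the
 * upper elements instead; with mm1=0x0706050403020100 and
 * mm2=0x0f0e0d0c0b0a0908, punpckhbw mm1, mm2 yields 0x0f070e060d050c04
 * (d4,s4,d5,s5,d6,s6,d7,s7 from low byte to high).
 */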
3538
3539
3540/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3541FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3542{
3543 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3544 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3545}
3546
3547/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3548FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3549{
3550 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3551 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3552}
3553/* Opcode 0xf3 0x0f 0x68 - invalid */
3554
3555
3556/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3557FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3558{
3559 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3560 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3561}
3562
3563/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3564FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3565{
3566 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3567 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3568
3569}
3570/* Opcode 0xf3 0x0f 0x69 - invalid */
3571
3572
3573/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3574FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3575{
3576 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3577 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3578}
3579
3580/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3581FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3582{
3583 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3584 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3585}
3586/* Opcode 0xf3 0x0f 0x6a - invalid */
3587
3588
3589/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3590FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3591/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3592FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3593/* Opcode 0xf3 0x0f 0x6b - invalid */
3594
3595
3596/* Opcode 0x0f 0x6c - invalid */
3597
3598/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3599FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3600{
3601 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3602 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3603}
3604
3605/* Opcode 0xf3 0x0f 0x6c - invalid */
3606/* Opcode 0xf2 0x0f 0x6c - invalid */
3607
3608
3609/* Opcode 0x0f 0x6d - invalid */
3610
3611/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3612FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3613{
3614 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3615 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3616}
3617
3618/* Opcode 0xf3 0x0f 0x6d - invalid */
3619
3620
3621FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3622{
3623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3624 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3625 {
3626 /**
3627 * @opcode 0x6e
3628 * @opcodesub rex.w=1
3629 * @oppfx none
3630 * @opcpuid mmx
3631 * @opgroup og_mmx_datamove
3632 * @opxcpttype 5
3633 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3634 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3635 */
3636 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3637 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3638 {
3639 /* MMX, greg64 */
3640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3641 IEM_MC_BEGIN(0, 1);
3642 IEM_MC_LOCAL(uint64_t, u64Tmp);
3643
3644 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3645 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3646
3647 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3648 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3649 IEM_MC_FPU_TO_MMX_MODE();
3650
3651 IEM_MC_ADVANCE_RIP();
3652 IEM_MC_END();
3653 }
3654 else
3655 {
3656 /* MMX, [mem64] */
3657 IEM_MC_BEGIN(0, 2);
3658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3659 IEM_MC_LOCAL(uint64_t, u64Tmp);
3660
3661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3665
3666 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3667 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3668 IEM_MC_FPU_TO_MMX_MODE();
3669
3670 IEM_MC_ADVANCE_RIP();
3671 IEM_MC_END();
3672 }
3673 }
3674 else
3675 {
3676 /**
3677 * @opdone
3678 * @opcode 0x6e
3679 * @opcodesub rex.w=0
3680 * @oppfx none
3681 * @opcpuid mmx
3682 * @opgroup og_mmx_datamove
3683 * @opxcpttype 5
3684 * @opfunction iemOp_movd_q_Pd_Ey
3685 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3686 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3687 */
3688 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3690 {
3691 /* MMX, greg */
3692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3693 IEM_MC_BEGIN(0, 1);
3694 IEM_MC_LOCAL(uint64_t, u64Tmp);
3695
3696 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3697 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3698
3699 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3700 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3701 IEM_MC_FPU_TO_MMX_MODE();
3702
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /* MMX, [mem] */
3709 IEM_MC_BEGIN(0, 2);
3710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3711 IEM_MC_LOCAL(uint32_t, u32Tmp);
3712
3713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3716 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3717
3718 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3719 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3720 IEM_MC_FPU_TO_MMX_MODE();
3721
3722 IEM_MC_ADVANCE_RIP();
3723 IEM_MC_END();
3724 }
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3730{
3731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3732 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3733 {
3734 /**
3735 * @opcode 0x6e
3736 * @opcodesub rex.w=1
3737 * @oppfx 0x66
3738 * @opcpuid sse2
3739 * @opgroup og_sse2_simdint_datamove
3740 * @opxcpttype 5
3741 * @optest 64-bit / op1=1 op2=2 -> op1=2
3742 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3743 */
3744 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3746 {
3747 /* XMM, greg64 */
3748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3749 IEM_MC_BEGIN(0, 1);
3750 IEM_MC_LOCAL(uint64_t, u64Tmp);
3751
3752 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3753 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3754
3755 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3756 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3757
3758 IEM_MC_ADVANCE_RIP();
3759 IEM_MC_END();
3760 }
3761 else
3762 {
3763 /* XMM, [mem64] */
3764 IEM_MC_BEGIN(0, 2);
3765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3766 IEM_MC_LOCAL(uint64_t, u64Tmp);
3767
3768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3770 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3771 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3772
3773 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3774 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3775
3776 IEM_MC_ADVANCE_RIP();
3777 IEM_MC_END();
3778 }
3779 }
3780 else
3781 {
3782 /**
3783 * @opdone
3784 * @opcode 0x6e
3785 * @opcodesub rex.w=0
3786 * @oppfx 0x66
3787 * @opcpuid sse2
3788 * @opgroup og_sse2_simdint_datamove
3789 * @opxcpttype 5
3790 * @opfunction iemOp_movd_q_Vy_Ey
3791 * @optest op1=1 op2=2 -> op1=2
3792 * @optest op1=0 op2=-42 -> op1=-42
3793 */
3794 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3796 {
3797 /* XMM, greg32 */
3798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3799 IEM_MC_BEGIN(0, 1);
3800 IEM_MC_LOCAL(uint32_t, u32Tmp);
3801
3802 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3803 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3804
3805 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3806 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3807
3808 IEM_MC_ADVANCE_RIP();
3809 IEM_MC_END();
3810 }
3811 else
3812 {
3813 /* XMM, [mem32] */
3814 IEM_MC_BEGIN(0, 2);
3815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3816 IEM_MC_LOCAL(uint32_t, u32Tmp);
3817
3818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3821 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3822
3823 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3824 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3825
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 }
3829 }
3830 return VINF_SUCCESS;
3831}
3832
3833/* Opcode 0xf3 0x0f 0x6e - invalid */
3834
3835
3836/**
3837 * @opcode 0x6f
3838 * @oppfx none
3839 * @opcpuid mmx
3840 * @opgroup og_mmx_datamove
3841 * @opxcpttype 5
3842 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3843 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3844 */
3845FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3846{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3850 {
3851 /*
3852 * Register, register.
3853 */
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3855 IEM_MC_BEGIN(0, 1);
3856 IEM_MC_LOCAL(uint64_t, u64Tmp);
3857
3858 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3859 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3860
3861 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3862 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3863 IEM_MC_FPU_TO_MMX_MODE();
3864
3865 IEM_MC_ADVANCE_RIP();
3866 IEM_MC_END();
3867 }
3868 else
3869 {
3870 /*
3871 * Register, memory.
3872 */
3873 IEM_MC_BEGIN(0, 2);
3874 IEM_MC_LOCAL(uint64_t, u64Tmp);
3875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3876
3877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3879 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3880 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3881
3882 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3883 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3884 IEM_MC_FPU_TO_MMX_MODE();
3885
3886 IEM_MC_ADVANCE_RIP();
3887 IEM_MC_END();
3888 }
3889 return VINF_SUCCESS;
3890}
3891
3892/**
3893 * @opcode 0x6f
3894 * @oppfx 0x66
3895 * @opcpuid sse2
3896 * @opgroup og_sse2_simdint_datamove
3897 * @opxcpttype 1
3898 * @optest op1=1 op2=2 -> op1=2
3899 * @optest op1=0 op2=-42 -> op1=-42
3900 */
3901FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3902{
3903 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3905 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3906 {
3907 /*
3908 * Register, register.
3909 */
3910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3911 IEM_MC_BEGIN(0, 0);
3912
3913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3915
3916 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3917 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3918 IEM_MC_ADVANCE_RIP();
3919 IEM_MC_END();
3920 }
3921 else
3922 {
3923 /*
3924 * Register, memory.
3925 */
3926 IEM_MC_BEGIN(0, 2);
3927 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3929
3930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3933 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3934
3935 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3936 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3937
3938 IEM_MC_ADVANCE_RIP();
3939 IEM_MC_END();
3940 }
3941 return VINF_SUCCESS;
3942}
3943
3944/**
3945 * @opcode 0x6f
3946 * @oppfx 0xf3
3947 * @opcpuid sse2
3948 * @opgroup og_sse2_simdint_datamove
3949 * @opxcpttype 4UA
3950 * @optest op1=1 op2=2 -> op1=2
3951 * @optest op1=0 op2=-42 -> op1=-42
3952 */
3953FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3954{
3955 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3957 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3958 {
3959 /*
3960 * Register, register.
3961 */
3962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3963 IEM_MC_BEGIN(0, 0);
3964 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3965 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3966 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3967 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3968 IEM_MC_ADVANCE_RIP();
3969 IEM_MC_END();
3970 }
3971 else
3972 {
3973 /*
3974 * Register, memory.
3975 */
3976 IEM_MC_BEGIN(0, 2);
3977 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3979
3980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3982 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
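        /* Unlike movdqa (0x66 0x0f 0x6f), no 16-byte alignment check here: plain IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant. */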
3984 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3985 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3986
3987 IEM_MC_ADVANCE_RIP();
3988 IEM_MC_END();
3989 }
3990 return VINF_SUCCESS;
3991}
3992
3993
3994/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3995FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3996{
3997 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4000 {
4001 /*
4002 * Register, register.
4003 */
4004 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4006
4007 IEM_MC_BEGIN(3, 0);
4008 IEM_MC_ARG(uint64_t *, pDst, 0);
4009 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4010 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4011 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4012 IEM_MC_PREPARE_FPU_USAGE();
4013 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4014 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4015 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
4016 IEM_MC_ADVANCE_RIP();
4017 IEM_MC_END();
4018 }
4019 else
4020 {
4021 /*
4022 * Register, memory.
4023 */
4024 IEM_MC_BEGIN(3, 2);
4025 IEM_MC_ARG(uint64_t *, pDst, 0);
4026 IEM_MC_LOCAL(uint64_t, uSrc);
4027 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4029
4030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
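        /* The imm8 shuffle control follows the ModR/M bytes and displacement, hence it is fetched only after the effective address calculation. */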
4031 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4032 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4034 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4035
4036 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4037 IEM_MC_PREPARE_FPU_USAGE();
4038 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4039 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
4040
4041 IEM_MC_ADVANCE_RIP();
4042 IEM_MC_END();
4043 }
4044 return VINF_SUCCESS;
4045}
4046
4047/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4048FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4049{
4050 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
4051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4053 {
4054 /*
4055 * Register, register.
4056 */
4057 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4059
4060 IEM_MC_BEGIN(3, 0);
4061 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4062 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4063 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4065 IEM_MC_PREPARE_SSE_USAGE();
4066 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4067 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4068 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
4069 IEM_MC_ADVANCE_RIP();
4070 IEM_MC_END();
4071 }
4072 else
4073 {
4074 /*
4075 * Register, memory.
4076 */
4077 IEM_MC_BEGIN(3, 2);
4078 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4079 IEM_MC_LOCAL(RTUINT128U, uSrc);
4080 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4082
4083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4084 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4085 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4087 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4088
4089 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4090 IEM_MC_PREPARE_SSE_USAGE();
4091 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4092 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
4093
4094 IEM_MC_ADVANCE_RIP();
4095 IEM_MC_END();
4096 }
4097 return VINF_SUCCESS;
4098}
4099
4100/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4101FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4102{
4103 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
4104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4106 {
4107 /*
4108 * Register, register.
4109 */
4110 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4112
4113 IEM_MC_BEGIN(3, 0);
4114 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4115 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4116 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4117 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4118 IEM_MC_PREPARE_SSE_USAGE();
4119 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4120 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4121 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
4122 IEM_MC_ADVANCE_RIP();
4123 IEM_MC_END();
4124 }
4125 else
4126 {
4127 /*
4128 * Register, memory.
4129 */
4130 IEM_MC_BEGIN(3, 2);
4131 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4132 IEM_MC_LOCAL(RTUINT128U, uSrc);
4133 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4138 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4140 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4141
4142 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4143 IEM_MC_PREPARE_SSE_USAGE();
4144 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4145 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
4146
4147 IEM_MC_ADVANCE_RIP();
4148 IEM_MC_END();
4149 }
4150 return VINF_SUCCESS;
4151}
4152
4153/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4154FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4155{
4156 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
4157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4158 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4159 {
4160 /*
4161 * Register, register.
4162 */
4163 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165
4166 IEM_MC_BEGIN(3, 0);
4167 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4168 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4169 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4171 IEM_MC_PREPARE_SSE_USAGE();
4172 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4173 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4174 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4175 IEM_MC_ADVANCE_RIP();
4176 IEM_MC_END();
4177 }
4178 else
4179 {
4180 /*
4181 * Register, memory.
4182 */
4183 IEM_MC_BEGIN(3, 2);
4184 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4185 IEM_MC_LOCAL(RTUINT128U, uSrc);
4186 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188
4189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4191 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4193 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4194
4195 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4196 IEM_MC_PREPARE_SSE_USAGE();
4197 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4198 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4199
4200 IEM_MC_ADVANCE_RIP();
4201 IEM_MC_END();
4202 }
4203 return VINF_SUCCESS;
4204}
4205
4206
4207/** Opcode 0x0f 0x71 11/2. */
4208FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4209
4210/** Opcode 0x66 0x0f 0x71 11/2. */
4211FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4212
4213/** Opcode 0x0f 0x71 11/4. */
4214FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4215
4216/** Opcode 0x66 0x0f 0x71 11/4. */
4217FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4218
4219/** Opcode 0x0f 0x71 11/6. */
4220FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4221
4222/** Opcode 0x66 0x0f 0x71 11/6. */
4223FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4224
4225
4226/**
4227 * Group 12 jump table for register variant.
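 *
 * Each /r row has four entries, one per mandatory prefix (none, 0x66, 0xf3,
 * 0xf2); the dispatcher indexes the table as reg * 4 + pVCpu->iem.s.idxPrefix.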
4228 */
4229IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4230{
4231 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4232 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4233 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4234 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4235 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4236 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4237 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4238 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4239};
4240AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4241
4242
4243/** Opcode 0x0f 0x71. */
4244FNIEMOP_DEF(iemOp_Grp12)
4245{
4246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4248 /* register, register */
4249 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4250 + pVCpu->iem.s.idxPrefix], bRm);
4251 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4252}
4253
4254
4255/** Opcode 0x0f 0x72 11/2. */
4256FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4257
4258/** Opcode 0x66 0x0f 0x72 11/2. */
4259FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4260
4261/** Opcode 0x0f 0x72 11/4. */
4262FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4263
4264/** Opcode 0x66 0x0f 0x72 11/4. */
4265FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4266
4267/** Opcode 0x0f 0x72 11/6. */
4268FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4269
4270/** Opcode 0x66 0x0f 0x72 11/6. */
4271FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4272
4273
4274/**
4275 * Group 13 jump table for register variant.
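 *
 * Same layout and indexing as the group 12 table above.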
4276 */
4277IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4278{
4279 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4280 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4281 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4282 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4283 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4284 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4285 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4286 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4287};
4288AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4289
4290/** Opcode 0x0f 0x72. */
4291FNIEMOP_DEF(iemOp_Grp13)
4292{
4293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4294 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4295 /* register, register */
4296 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4297 + pVCpu->iem.s.idxPrefix], bRm);
4298 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4299}
4300
4301
4302/** Opcode 0x0f 0x73 11/2. */
4303FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4304
4305/** Opcode 0x66 0x0f 0x73 11/2. */
4306FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4307
4308/** Opcode 0x66 0x0f 0x73 11/3. */
4309FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4310
4311/** Opcode 0x0f 0x73 11/6. */
4312FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4313
4314/** Opcode 0x66 0x0f 0x73 11/6. */
4315FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4316
4317/** Opcode 0x66 0x0f 0x73 11/7. */
4318FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4319
4320/**
4321 * Group 14 jump table for register variant.
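 *
 * Note that /3 (psrldq) and /7 (pslldq) only exist with the 0x66 prefix, so
 * the no-prefix (MMX) slots in those rows stay invalid.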
4322 */
4323IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4324{
4325 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4326 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4327 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4328 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4329 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4330 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4331 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4332 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4333};
4334AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4335
4336
4337/** Opcode 0x0f 0x73. */
4338FNIEMOP_DEF(iemOp_Grp14)
4339{
4340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4341 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4342 /* register, register */
4343 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4344 + pVCpu->iem.s.idxPrefix], bRm);
4345 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4346}
4347
4348
4349/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4350FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4351{
4352 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4353 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4354}
4355
4356
4357/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4358FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4359{
4360 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4361 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4362}
4363
4364
4365/* Opcode 0xf3 0x0f 0x74 - invalid */
4366/* Opcode 0xf2 0x0f 0x74 - invalid */
4367
4368
4369/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4370FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4371{
4372 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4373 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4374}
4375
4376
4377/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4378FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4379{
4380 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4381 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4382}
4383
4384
4385/* Opcode 0xf3 0x0f 0x75 - invalid */
4386/* Opcode 0xf2 0x0f 0x75 - invalid */
4387
4388
4389/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4390FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4391{
4392 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4393 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4394}
4395
4396
4397/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4398FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4399{
4400 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4401 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4402}
4403
4404
4405/* Opcode 0xf3 0x0f 0x76 - invalid */
4406/* Opcode 0xf2 0x0f 0x76 - invalid */
4407
4408
4409/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4410FNIEMOP_DEF(iemOp_emms)
4411{
4412 IEMOP_MNEMONIC(emms, "emms");
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414
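    /* emms marks all x87 registers as empty again, taking the FPU out of MMX mode. */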
    IEM_MC_BEGIN(0, 0);
4416 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4417 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4418 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4419 IEM_MC_FPU_FROM_MMX_MODE();
4420 IEM_MC_ADVANCE_RIP();
4421 IEM_MC_END();
4422 return VINF_SUCCESS;
4423}
4424
4425/* Opcode 0x66 0x0f 0x77 - invalid */
4426/* Opcode 0xf3 0x0f 0x77 - invalid */
4427/* Opcode 0xf2 0x0f 0x77 - invalid */
4428
4429/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4430#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4431FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4432{
4433 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4434 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
4435 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
4436 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
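    /* The operand size is fixed: 64-bit in long mode, 32-bit otherwise; the 0x66, 0xf3 and 0xf2 prefixes are invalid (checked below). */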
4437
4438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4440 {
4441 /*
4442 * Register, register.
4443 */
4444 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4445 if (enmEffOpSize == IEMMODE_64BIT)
4446 {
4447 IEM_MC_BEGIN(2, 0);
4448 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4449 IEM_MC_ARG(uint64_t, u64Enc, 1);
4450 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4451 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4452 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
4453 IEM_MC_END();
4454 }
4455 else
4456 {
4457 IEM_MC_BEGIN(2, 0);
4458 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4459 IEM_MC_ARG(uint32_t, u32Enc, 1);
4460 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4461 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4462 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
4463 IEM_MC_END();
4464 }
4465 }
4466 else
4467 {
4468 /*
4469 * Memory, register.
4470 */
4471 if (enmEffOpSize == IEMMODE_64BIT)
4472 {
4473 IEM_MC_BEGIN(3, 0);
4474 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4475 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4476 IEM_MC_ARG(uint64_t, u64Enc, 2);
4477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4478 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4479 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4480 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4481 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
4482 IEM_MC_END();
4483 }
4484 else
4485 {
4486 IEM_MC_BEGIN(3, 0);
4487 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4488 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4489 IEM_MC_ARG(uint32_t, u32Enc, 2);
4490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4491 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4492 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4493 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4494 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
4495 IEM_MC_END();
4496 }
4497 }
4498 return VINF_SUCCESS;
4499}
4500#else
4501FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4502#endif
4503
4504/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4505FNIEMOP_STUB(iemOp_AmdGrp17);
4506/* Opcode 0xf3 0x0f 0x78 - invalid */
4507/* Opcode 0xf2 0x0f 0x78 - invalid */
4508
4509/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4510#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4511FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4512{
4513 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4514 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
4515 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
4516 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4517
4518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4520 {
4521 /*
4522 * Register, register.
4523 */
4524 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4525 if (enmEffOpSize == IEMMODE_64BIT)
4526 {
4527 IEM_MC_BEGIN(2, 0);
4528 IEM_MC_ARG(uint64_t, u64Val, 0);
4529 IEM_MC_ARG(uint64_t, u64Enc, 1);
4530 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4531 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4532 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4533 IEM_MC_END();
4534 }
4535 else
4536 {
4537 IEM_MC_BEGIN(2, 0);
4538 IEM_MC_ARG(uint32_t, u32Val, 0);
4539 IEM_MC_ARG(uint32_t, u32Enc, 1);
4540 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4541 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4542 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4543 IEM_MC_END();
4544 }
4545 }
4546 else
4547 {
4548 /*
4549 * Register, memory.
4550 */
4551 if (enmEffOpSize == IEMMODE_64BIT)
4552 {
4553 IEM_MC_BEGIN(3, 0);
4554 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4555 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4556 IEM_MC_ARG(uint64_t, u64Enc, 2);
4557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4558 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4559 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4560 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4561 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
4562 IEM_MC_END();
4563 }
4564 else
4565 {
4566 IEM_MC_BEGIN(3, 0);
4567 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4568 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
4569 IEM_MC_ARG(uint32_t, u32Enc, 2);
4570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4571 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4572 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4573 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4574 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
4575 IEM_MC_END();
4576 }
4577 }
4578 return VINF_SUCCESS;
4579}
4580#else
4581FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4582#endif
4583/* Opcode 0x66 0x0f 0x79 - invalid */
4584/* Opcode 0xf3 0x0f 0x79 - invalid */
4585/* Opcode 0xf2 0x0f 0x79 - invalid */
4586
4587/* Opcode 0x0f 0x7a - invalid */
4588/* Opcode 0x66 0x0f 0x7a - invalid */
4589/* Opcode 0xf3 0x0f 0x7a - invalid */
4590/* Opcode 0xf2 0x0f 0x7a - invalid */
4591
4592/* Opcode 0x0f 0x7b - invalid */
4593/* Opcode 0x66 0x0f 0x7b - invalid */
4594/* Opcode 0xf3 0x0f 0x7b - invalid */
4595/* Opcode 0xf2 0x0f 0x7b - invalid */
4596
4597/* Opcode 0x0f 0x7c - invalid */
4598/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4599FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4600/* Opcode 0xf3 0x0f 0x7c - invalid */
4601/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4602FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4603
4604/* Opcode 0x0f 0x7d - invalid */
4605/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4606FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4607/* Opcode 0xf3 0x0f 0x7d - invalid */
4608/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4609FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4610
4611
4612/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4613FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4614{
4615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4616 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4617 {
4618 /**
4619 * @opcode 0x7e
4620 * @opcodesub rex.w=1
4621 * @oppfx none
4622 * @opcpuid mmx
4623 * @opgroup og_mmx_datamove
4624 * @opxcpttype 5
4625 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4626 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4627 */
4628 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4630 {
4631 /* greg64, MMX */
4632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4633 IEM_MC_BEGIN(0, 1);
4634 IEM_MC_LOCAL(uint64_t, u64Tmp);
4635
4636 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4637 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4638
4639 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4640 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4641 IEM_MC_FPU_TO_MMX_MODE();
4642
4643 IEM_MC_ADVANCE_RIP();
4644 IEM_MC_END();
4645 }
4646 else
4647 {
4648 /* [mem64], MMX */
4649 IEM_MC_BEGIN(0, 2);
4650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4651 IEM_MC_LOCAL(uint64_t, u64Tmp);
4652
4653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4655 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4656 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4657
4658 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4659 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4660 IEM_MC_FPU_TO_MMX_MODE();
4661
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 }
4666 else
4667 {
4668 /**
4669 * @opdone
4670 * @opcode 0x7e
4671 * @opcodesub rex.w=0
4672 * @oppfx none
4673 * @opcpuid mmx
4674 * @opgroup og_mmx_datamove
4675 * @opxcpttype 5
     * @opfunction iemOp_movd_q_Ey_Pd
4677 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4678 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4679 */
4680 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4682 {
4683 /* greg32, MMX */
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_BEGIN(0, 1);
4686 IEM_MC_LOCAL(uint32_t, u32Tmp);
4687
4688 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4689 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4690
4691 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4692 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4693 IEM_MC_FPU_TO_MMX_MODE();
4694
4695 IEM_MC_ADVANCE_RIP();
4696 IEM_MC_END();
4697 }
4698 else
4699 {
4700 /* [mem32], MMX */
4701 IEM_MC_BEGIN(0, 2);
4702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4703 IEM_MC_LOCAL(uint32_t, u32Tmp);
4704
4705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4707 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4708 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4709
4710 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4711 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4712 IEM_MC_FPU_TO_MMX_MODE();
4713
4714 IEM_MC_ADVANCE_RIP();
4715 IEM_MC_END();
4716 }
4717 }
    return VINF_SUCCESS;
}
4721
4722
4723FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4724{
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4727 {
4728 /**
4729 * @opcode 0x7e
4730 * @opcodesub rex.w=1
4731 * @oppfx 0x66
4732 * @opcpuid sse2
4733 * @opgroup og_sse2_simdint_datamove
4734 * @opxcpttype 5
4735 * @optest 64-bit / op1=1 op2=2 -> op1=2
4736 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4737 */
4738 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4739 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4740 {
4741 /* greg64, XMM */
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4743 IEM_MC_BEGIN(0, 1);
4744 IEM_MC_LOCAL(uint64_t, u64Tmp);
4745
4746 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4747 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4748
4749 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4750 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4751
4752 IEM_MC_ADVANCE_RIP();
4753 IEM_MC_END();
4754 }
4755 else
4756 {
4757 /* [mem64], XMM */
4758 IEM_MC_BEGIN(0, 2);
4759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4760 IEM_MC_LOCAL(uint64_t, u64Tmp);
4761
4762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4766
4767 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4768 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4769
4770 IEM_MC_ADVANCE_RIP();
4771 IEM_MC_END();
4772 }
4773 }
4774 else
4775 {
4776 /**
4777 * @opdone
4778 * @opcode 0x7e
4779 * @opcodesub rex.w=0
4780 * @oppfx 0x66
4781 * @opcpuid sse2
4782 * @opgroup og_sse2_simdint_datamove
4783 * @opxcpttype 5
     * @opfunction iemOp_movd_q_Ey_Vy
4785 * @optest op1=1 op2=2 -> op1=2
4786 * @optest op1=0 op2=-42 -> op1=-42
4787 */
4788 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4790 {
4791 /* greg32, XMM */
4792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint32_t, u32Tmp);
4795
4796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4798
4799 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4800 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4801
4802 IEM_MC_ADVANCE_RIP();
4803 IEM_MC_END();
4804 }
4805 else
4806 {
4807 /* [mem32], XMM */
4808 IEM_MC_BEGIN(0, 2);
4809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4810 IEM_MC_LOCAL(uint32_t, u32Tmp);
4811
4812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4815 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4816
4817 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4818 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4819
4820 IEM_MC_ADVANCE_RIP();
4821 IEM_MC_END();
4822 }
4823 }
    return VINF_SUCCESS;
}
4827
4828/**
4829 * @opcode 0x7e
4830 * @oppfx 0xf3
4831 * @opcpuid sse2
4832 * @opgroup og_sse2_pcksclr_datamove
4833 * @opxcpttype none
4834 * @optest op1=1 op2=2 -> op1=2
4835 * @optest op1=0 op2=-42 -> op1=-42
4836 */
4837FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4838{
4839 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4842 {
4843 /*
4844 * Register, register.
4845 */
4846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
4848 IEM_MC_LOCAL(uint64_t, uSrc);
4849
4850 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4851 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4852
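        /* The ZX store clears bits 127:64 of the destination register (the VqZx in the mnemonic). */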
4853 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4854 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4855
4856 IEM_MC_ADVANCE_RIP();
4857 IEM_MC_END();
4858 }
4859 else
4860 {
4861 /*
         * Register, memory.
4863 */
4864 IEM_MC_BEGIN(0, 2);
4865 IEM_MC_LOCAL(uint64_t, uSrc);
4866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4867
4868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4871 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4872
4873 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4874 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4875
4876 IEM_MC_ADVANCE_RIP();
4877 IEM_MC_END();
4878 }
4879 return VINF_SUCCESS;
4880}
4881
4882/* Opcode 0xf2 0x0f 0x7e - invalid */
4883
4884
4885/** Opcode 0x0f 0x7f - movq Qq, Pq */
4886FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4887{
4888 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4891 {
4892 /*
4893 * Register, register.
4894 */
4895 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4896 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4898 IEM_MC_BEGIN(0, 1);
4899 IEM_MC_LOCAL(uint64_t, u64Tmp);
4900 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4901 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4902 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4903 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4904 IEM_MC_ADVANCE_RIP();
4905 IEM_MC_END();
4906 }
4907 else
4908 {
4909 /*
         * Memory, register.
4911 */
4912 IEM_MC_BEGIN(0, 2);
4913 IEM_MC_LOCAL(uint64_t, u64Tmp);
4914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4915
4916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4918 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4919 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4920
4921 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4922 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4923
4924 IEM_MC_ADVANCE_RIP();
4925 IEM_MC_END();
4926 }
4927 return VINF_SUCCESS;
4928}
4929
4930/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4931FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4932{
4933 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4936 {
4937 /*
4938 * Register, register.
4939 */
4940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4941 IEM_MC_BEGIN(0, 0);
4942 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4943 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4944 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4945 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4946 IEM_MC_ADVANCE_RIP();
4947 IEM_MC_END();
4948 }
4949 else
4950 {
4951 /*
         * Memory, register.
4953 */
4954 IEM_MC_BEGIN(0, 2);
4955 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4957
4958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4960 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4961 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4962
4963 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4964 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4965
4966 IEM_MC_ADVANCE_RIP();
4967 IEM_MC_END();
4968 }
4969 return VINF_SUCCESS;
4970}
4971
4972/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4973FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4974{
    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4978 {
4979 /*
4980 * Register, register.
4981 */
4982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4983 IEM_MC_BEGIN(0, 0);
4984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4985 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4986 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4987 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4988 IEM_MC_ADVANCE_RIP();
4989 IEM_MC_END();
4990 }
4991 else
4992 {
4993 /*
         * Memory, register.
4995 */
4996 IEM_MC_BEGIN(0, 2);
4997 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4999
5000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5002 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5003 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5004
5005 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5006 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5007
5008 IEM_MC_ADVANCE_RIP();
5009 IEM_MC_END();
5010 }
5011 return VINF_SUCCESS;
5012}
5013
5014/* Opcode 0xf2 0x0f 0x7f - invalid */
5015
5016
5017
5018/** Opcode 0x0f 0x80. */
5019FNIEMOP_DEF(iemOp_jo_Jv)
5020{
5021 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
5022 IEMOP_HLP_MIN_386();
5023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
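    /* Jv: 16-bit displacement with 16-bit operand size, otherwise a sign-extended 32-bit displacement; in 64-bit mode the operand size defaults to 64-bit. The same pattern repeats for all the 0x0f 0x80..0x8f jumps. */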
5024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5025 {
5026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5028
5029 IEM_MC_BEGIN(0, 0);
5030 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5031 IEM_MC_REL_JMP_S16(i16Imm);
5032 } IEM_MC_ELSE() {
5033 IEM_MC_ADVANCE_RIP();
5034 } IEM_MC_ENDIF();
5035 IEM_MC_END();
5036 }
5037 else
5038 {
5039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5041
5042 IEM_MC_BEGIN(0, 0);
5043 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5044 IEM_MC_REL_JMP_S32(i32Imm);
5045 } IEM_MC_ELSE() {
5046 IEM_MC_ADVANCE_RIP();
5047 } IEM_MC_ENDIF();
5048 IEM_MC_END();
5049 }
5050 return VINF_SUCCESS;
5051}
5052
5053
5054/** Opcode 0x0f 0x81. */
5055FNIEMOP_DEF(iemOp_jno_Jv)
5056{
5057 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5058 IEMOP_HLP_MIN_386();
5059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5061 {
5062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5064
5065 IEM_MC_BEGIN(0, 0);
5066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5067 IEM_MC_ADVANCE_RIP();
5068 } IEM_MC_ELSE() {
5069 IEM_MC_REL_JMP_S16(i16Imm);
5070 } IEM_MC_ENDIF();
5071 IEM_MC_END();
5072 }
5073 else
5074 {
5075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077
5078 IEM_MC_BEGIN(0, 0);
5079 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5080 IEM_MC_ADVANCE_RIP();
5081 } IEM_MC_ELSE() {
5082 IEM_MC_REL_JMP_S32(i32Imm);
5083 } IEM_MC_ENDIF();
5084 IEM_MC_END();
5085 }
5086 return VINF_SUCCESS;
5087}
5088
5089
5090/** Opcode 0x0f 0x82. */
5091FNIEMOP_DEF(iemOp_jc_Jv)
5092{
5093 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5094 IEMOP_HLP_MIN_386();
5095 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5096 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5097 {
5098 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100
5101 IEM_MC_BEGIN(0, 0);
5102 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5103 IEM_MC_REL_JMP_S16(i16Imm);
5104 } IEM_MC_ELSE() {
5105 IEM_MC_ADVANCE_RIP();
5106 } IEM_MC_ENDIF();
5107 IEM_MC_END();
5108 }
5109 else
5110 {
5111 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113
5114 IEM_MC_BEGIN(0, 0);
5115 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5116 IEM_MC_REL_JMP_S32(i32Imm);
5117 } IEM_MC_ELSE() {
5118 IEM_MC_ADVANCE_RIP();
5119 } IEM_MC_ENDIF();
5120 IEM_MC_END();
5121 }
5122 return VINF_SUCCESS;
5123}
5124
5125
5126/** Opcode 0x0f 0x83. */
5127FNIEMOP_DEF(iemOp_jnc_Jv)
5128{
5129 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5130 IEMOP_HLP_MIN_386();
5131 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5132 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5133 {
5134 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136
5137 IEM_MC_BEGIN(0, 0);
5138 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5139 IEM_MC_ADVANCE_RIP();
5140 } IEM_MC_ELSE() {
5141 IEM_MC_REL_JMP_S16(i16Imm);
5142 } IEM_MC_ENDIF();
5143 IEM_MC_END();
5144 }
5145 else
5146 {
5147 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149
5150 IEM_MC_BEGIN(0, 0);
5151 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5152 IEM_MC_ADVANCE_RIP();
5153 } IEM_MC_ELSE() {
5154 IEM_MC_REL_JMP_S32(i32Imm);
5155 } IEM_MC_ENDIF();
5156 IEM_MC_END();
5157 }
5158 return VINF_SUCCESS;
5159}
5160
5161
5162/** Opcode 0x0f 0x84. */
5163FNIEMOP_DEF(iemOp_je_Jv)
5164{
5165 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5166 IEMOP_HLP_MIN_386();
5167 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5168 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5169 {
5170 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172
5173 IEM_MC_BEGIN(0, 0);
5174 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5175 IEM_MC_REL_JMP_S16(i16Imm);
5176 } IEM_MC_ELSE() {
5177 IEM_MC_ADVANCE_RIP();
5178 } IEM_MC_ENDIF();
5179 IEM_MC_END();
5180 }
5181 else
5182 {
5183 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5185
5186 IEM_MC_BEGIN(0, 0);
5187 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5188 IEM_MC_REL_JMP_S32(i32Imm);
5189 } IEM_MC_ELSE() {
5190 IEM_MC_ADVANCE_RIP();
5191 } IEM_MC_ENDIF();
5192 IEM_MC_END();
5193 }
5194 return VINF_SUCCESS;
5195}
5196
5197
5198/** Opcode 0x0f 0x85. */
5199FNIEMOP_DEF(iemOp_jne_Jv)
5200{
5201 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5202 IEMOP_HLP_MIN_386();
5203 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5204 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5205 {
5206 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5208
5209 IEM_MC_BEGIN(0, 0);
5210 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5211 IEM_MC_ADVANCE_RIP();
5212 } IEM_MC_ELSE() {
5213 IEM_MC_REL_JMP_S16(i16Imm);
5214 } IEM_MC_ENDIF();
5215 IEM_MC_END();
5216 }
5217 else
5218 {
5219 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5221
5222 IEM_MC_BEGIN(0, 0);
5223 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5224 IEM_MC_ADVANCE_RIP();
5225 } IEM_MC_ELSE() {
5226 IEM_MC_REL_JMP_S32(i32Imm);
5227 } IEM_MC_ENDIF();
5228 IEM_MC_END();
5229 }
5230 return VINF_SUCCESS;
5231}
5232
5233
5234/** Opcode 0x0f 0x86. */
5235FNIEMOP_DEF(iemOp_jbe_Jv)
5236{
5237 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5238 IEMOP_HLP_MIN_386();
5239 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5240 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5241 {
5242 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5244
5245 IEM_MC_BEGIN(0, 0);
5246 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5247 IEM_MC_REL_JMP_S16(i16Imm);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_ADVANCE_RIP();
5250 } IEM_MC_ENDIF();
5251 IEM_MC_END();
5252 }
5253 else
5254 {
5255 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5257
5258 IEM_MC_BEGIN(0, 0);
5259 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5260 IEM_MC_REL_JMP_S32(i32Imm);
5261 } IEM_MC_ELSE() {
5262 IEM_MC_ADVANCE_RIP();
5263 } IEM_MC_ENDIF();
5264 IEM_MC_END();
5265 }
5266 return VINF_SUCCESS;
5267}
5268
5269
5270/** Opcode 0x0f 0x87. */
5271FNIEMOP_DEF(iemOp_jnbe_Jv)
5272{
5273 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5274 IEMOP_HLP_MIN_386();
5275 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5276 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5277 {
5278 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5280
5281 IEM_MC_BEGIN(0, 0);
5282 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5283 IEM_MC_ADVANCE_RIP();
5284 } IEM_MC_ELSE() {
5285 IEM_MC_REL_JMP_S16(i16Imm);
5286 } IEM_MC_ENDIF();
5287 IEM_MC_END();
5288 }
5289 else
5290 {
5291 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5293
5294 IEM_MC_BEGIN(0, 0);
5295 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5296 IEM_MC_ADVANCE_RIP();
5297 } IEM_MC_ELSE() {
5298 IEM_MC_REL_JMP_S32(i32Imm);
5299 } IEM_MC_ENDIF();
5300 IEM_MC_END();
5301 }
5302 return VINF_SUCCESS;
5303}
5304
5305
5306/** Opcode 0x0f 0x88. */
5307FNIEMOP_DEF(iemOp_js_Jv)
5308{
5309 IEMOP_MNEMONIC(js_Jv, "js Jv");
5310 IEMOP_HLP_MIN_386();
5311 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5312 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5313 {
5314 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5316
5317 IEM_MC_BEGIN(0, 0);
5318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5319 IEM_MC_REL_JMP_S16(i16Imm);
5320 } IEM_MC_ELSE() {
5321 IEM_MC_ADVANCE_RIP();
5322 } IEM_MC_ENDIF();
5323 IEM_MC_END();
5324 }
5325 else
5326 {
5327 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329
5330 IEM_MC_BEGIN(0, 0);
5331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5332 IEM_MC_REL_JMP_S32(i32Imm);
5333 } IEM_MC_ELSE() {
5334 IEM_MC_ADVANCE_RIP();
5335 } IEM_MC_ENDIF();
5336 IEM_MC_END();
5337 }
5338 return VINF_SUCCESS;
5339}
5340
5341
5342/** Opcode 0x0f 0x89. */
5343FNIEMOP_DEF(iemOp_jns_Jv)
5344{
5345 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5346 IEMOP_HLP_MIN_386();
5347 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5348 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5349 {
5350 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5352
5353 IEM_MC_BEGIN(0, 0);
5354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5355 IEM_MC_ADVANCE_RIP();
5356 } IEM_MC_ELSE() {
5357 IEM_MC_REL_JMP_S16(i16Imm);
5358 } IEM_MC_ENDIF();
5359 IEM_MC_END();
5360 }
5361 else
5362 {
5363 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5365
5366 IEM_MC_BEGIN(0, 0);
5367 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5368 IEM_MC_ADVANCE_RIP();
5369 } IEM_MC_ELSE() {
5370 IEM_MC_REL_JMP_S32(i32Imm);
5371 } IEM_MC_ENDIF();
5372 IEM_MC_END();
5373 }
5374 return VINF_SUCCESS;
5375}
5376
5377
5378/** Opcode 0x0f 0x8a. */
5379FNIEMOP_DEF(iemOp_jp_Jv)
5380{
5381 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5382 IEMOP_HLP_MIN_386();
5383 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5384 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5385 {
5386 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5388
5389 IEM_MC_BEGIN(0, 0);
5390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5391 IEM_MC_REL_JMP_S16(i16Imm);
5392 } IEM_MC_ELSE() {
5393 IEM_MC_ADVANCE_RIP();
5394 } IEM_MC_ENDIF();
5395 IEM_MC_END();
5396 }
5397 else
5398 {
5399 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5401
5402 IEM_MC_BEGIN(0, 0);
5403 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5404 IEM_MC_REL_JMP_S32(i32Imm);
5405 } IEM_MC_ELSE() {
5406 IEM_MC_ADVANCE_RIP();
5407 } IEM_MC_ENDIF();
5408 IEM_MC_END();
5409 }
5410 return VINF_SUCCESS;
5411}
5412
5413
5414/** Opcode 0x0f 0x8b. */
5415FNIEMOP_DEF(iemOp_jnp_Jv)
5416{
5417 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5418 IEMOP_HLP_MIN_386();
5419 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5420 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5421 {
5422 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5424
5425 IEM_MC_BEGIN(0, 0);
5426 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5427 IEM_MC_ADVANCE_RIP();
5428 } IEM_MC_ELSE() {
5429 IEM_MC_REL_JMP_S16(i16Imm);
5430 } IEM_MC_ENDIF();
5431 IEM_MC_END();
5432 }
5433 else
5434 {
5435 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437
5438 IEM_MC_BEGIN(0, 0);
5439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5440 IEM_MC_ADVANCE_RIP();
5441 } IEM_MC_ELSE() {
5442 IEM_MC_REL_JMP_S32(i32Imm);
5443 } IEM_MC_ENDIF();
5444 IEM_MC_END();
5445 }
5446 return VINF_SUCCESS;
5447}
5448
5449
5450/** Opcode 0x0f 0x8c. */
5451FNIEMOP_DEF(iemOp_jl_Jv)
5452{
5453 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5454 IEMOP_HLP_MIN_386();
5455 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5456 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5457 {
5458 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5460
5461 IEM_MC_BEGIN(0, 0);
5462 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5463 IEM_MC_REL_JMP_S16(i16Imm);
5464 } IEM_MC_ELSE() {
5465 IEM_MC_ADVANCE_RIP();
5466 } IEM_MC_ENDIF();
5467 IEM_MC_END();
5468 }
5469 else
5470 {
5471 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5473
5474 IEM_MC_BEGIN(0, 0);
5475 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5476 IEM_MC_REL_JMP_S32(i32Imm);
5477 } IEM_MC_ELSE() {
5478 IEM_MC_ADVANCE_RIP();
5479 } IEM_MC_ENDIF();
5480 IEM_MC_END();
5481 }
5482 return VINF_SUCCESS;
5483}
5484
5485
5486/** Opcode 0x0f 0x8d. */
5487FNIEMOP_DEF(iemOp_jnl_Jv)
5488{
5489 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5490 IEMOP_HLP_MIN_386();
5491 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5492 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5493 {
5494 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5496
5497 IEM_MC_BEGIN(0, 0);
5498 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5499 IEM_MC_ADVANCE_RIP();
5500 } IEM_MC_ELSE() {
5501 IEM_MC_REL_JMP_S16(i16Imm);
5502 } IEM_MC_ENDIF();
5503 IEM_MC_END();
5504 }
5505 else
5506 {
5507 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5509
5510 IEM_MC_BEGIN(0, 0);
5511 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5512 IEM_MC_ADVANCE_RIP();
5513 } IEM_MC_ELSE() {
5514 IEM_MC_REL_JMP_S32(i32Imm);
5515 } IEM_MC_ENDIF();
5516 IEM_MC_END();
5517 }
5518 return VINF_SUCCESS;
5519}
5520
5521
5522/** Opcode 0x0f 0x8e. */
5523FNIEMOP_DEF(iemOp_jle_Jv)
5524{
5525 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5526 IEMOP_HLP_MIN_386();
5527 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5528 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5529 {
5530 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5532
5533 IEM_MC_BEGIN(0, 0);
5534 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5535 IEM_MC_REL_JMP_S16(i16Imm);
5536 } IEM_MC_ELSE() {
5537 IEM_MC_ADVANCE_RIP();
5538 } IEM_MC_ENDIF();
5539 IEM_MC_END();
5540 }
5541 else
5542 {
5543 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545
5546 IEM_MC_BEGIN(0, 0);
5547 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5548 IEM_MC_REL_JMP_S32(i32Imm);
5549 } IEM_MC_ELSE() {
5550 IEM_MC_ADVANCE_RIP();
5551 } IEM_MC_ENDIF();
5552 IEM_MC_END();
5553 }
5554 return VINF_SUCCESS;
5555}
5556
5557
5558/** Opcode 0x0f 0x8f. */
5559FNIEMOP_DEF(iemOp_jnle_Jv)
5560{
5561 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5562 IEMOP_HLP_MIN_386();
5563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5564 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5565 {
5566 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5568
5569 IEM_MC_BEGIN(0, 0);
5570 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5571 IEM_MC_ADVANCE_RIP();
5572 } IEM_MC_ELSE() {
5573 IEM_MC_REL_JMP_S16(i16Imm);
5574 } IEM_MC_ENDIF();
5575 IEM_MC_END();
5576 }
5577 else
5578 {
5579 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581
5582 IEM_MC_BEGIN(0, 0);
5583 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5584 IEM_MC_ADVANCE_RIP();
5585 } IEM_MC_ELSE() {
5586 IEM_MC_REL_JMP_S32(i32Imm);
5587 } IEM_MC_ENDIF();
5588 IEM_MC_END();
5589 }
5590 return VINF_SUCCESS;
5591}
5592
5593
5594/** Opcode 0x0f 0x90. */
5595FNIEMOP_DEF(iemOp_seto_Eb)
5596{
5597 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5598 IEMOP_HLP_MIN_386();
5599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5600
5601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5602 * any way. AMD says it's "unused", whatever that means. We're
5603 * ignoring it for now. */
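    /* All the setcc forms store exactly 0 or 1 in the byte operand; the
       condition only selects which of the two constants gets written. */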
5604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5605 {
5606 /* register target */
5607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5608 IEM_MC_BEGIN(0, 0);
5609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5611 } IEM_MC_ELSE() {
5612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5613 } IEM_MC_ENDIF();
5614 IEM_MC_ADVANCE_RIP();
5615 IEM_MC_END();
5616 }
5617 else
5618 {
5619 /* memory target */
5620 IEM_MC_BEGIN(0, 1);
5621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5626 } IEM_MC_ELSE() {
5627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5628 } IEM_MC_ENDIF();
5629 IEM_MC_ADVANCE_RIP();
5630 IEM_MC_END();
5631 }
5632 return VINF_SUCCESS;
5633}
5634
5635
5636/** Opcode 0x0f 0x91. */
5637FNIEMOP_DEF(iemOp_setno_Eb)
5638{
5639 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5640 IEMOP_HLP_MIN_386();
5641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5642
5643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5644 * any way. AMD says it's "unused", whatever that means. We're
5645 * ignoring it for now. */
5646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5647 {
5648 /* register target */
5649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5650 IEM_MC_BEGIN(0, 0);
5651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5653 } IEM_MC_ELSE() {
5654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5655 } IEM_MC_ENDIF();
5656 IEM_MC_ADVANCE_RIP();
5657 IEM_MC_END();
5658 }
5659 else
5660 {
5661 /* memory target */
5662 IEM_MC_BEGIN(0, 1);
5663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5666 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5668 } IEM_MC_ELSE() {
5669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5670 } IEM_MC_ENDIF();
5671 IEM_MC_ADVANCE_RIP();
5672 IEM_MC_END();
5673 }
5674 return VINF_SUCCESS;
5675}
5676
5677
5678/** Opcode 0x0f 0x92. */
5679FNIEMOP_DEF(iemOp_setc_Eb)
5680{
5681 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5682 IEMOP_HLP_MIN_386();
5683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5684
5685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5686 * any way. AMD says it's "unused", whatever that means. We're
5687 * ignoring it for now. */
5688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5689 {
5690 /* register target */
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 IEM_MC_BEGIN(0, 0);
5693 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5695 } IEM_MC_ELSE() {
5696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5697 } IEM_MC_ENDIF();
5698 IEM_MC_ADVANCE_RIP();
5699 IEM_MC_END();
5700 }
5701 else
5702 {
5703 /* memory target */
5704 IEM_MC_BEGIN(0, 1);
5705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5708 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5710 } IEM_MC_ELSE() {
5711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5712 } IEM_MC_ENDIF();
5713 IEM_MC_ADVANCE_RIP();
5714 IEM_MC_END();
5715 }
5716 return VINF_SUCCESS;
5717}
5718
5719
5720/** Opcode 0x0f 0x93. */
5721FNIEMOP_DEF(iemOp_setnc_Eb)
5722{
5723 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5724 IEMOP_HLP_MIN_386();
5725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5726
5727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5728 * any way. AMD says it's "unused", whatever that means. We're
5729 * ignoring it for now. */
5730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5731 {
5732 /* register target */
5733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5734 IEM_MC_BEGIN(0, 0);
5735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5737 } IEM_MC_ELSE() {
5738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5739 } IEM_MC_ENDIF();
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 }
5743 else
5744 {
5745 /* memory target */
5746 IEM_MC_BEGIN(0, 1);
5747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5750 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5752 } IEM_MC_ELSE() {
5753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5754 } IEM_MC_ENDIF();
5755 IEM_MC_ADVANCE_RIP();
5756 IEM_MC_END();
5757 }
5758 return VINF_SUCCESS;
5759}
5760
5761
5762/** Opcode 0x0f 0x94. */
5763FNIEMOP_DEF(iemOp_sete_Eb)
5764{
5765 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5766 IEMOP_HLP_MIN_386();
5767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5768
5769 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5770 * any way. AMD says it's "unused", whatever that means. We're
5771 * ignoring it for now. */
5772 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5773 {
5774 /* register target */
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776 IEM_MC_BEGIN(0, 0);
5777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5778 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5779 } IEM_MC_ELSE() {
5780 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5781 } IEM_MC_ENDIF();
5782 IEM_MC_ADVANCE_RIP();
5783 IEM_MC_END();
5784 }
5785 else
5786 {
5787 /* memory target */
5788 IEM_MC_BEGIN(0, 1);
5789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5792 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5793 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5794 } IEM_MC_ELSE() {
5795 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5796 } IEM_MC_ENDIF();
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 }
5800 return VINF_SUCCESS;
5801}
5802
5803
5804/** Opcode 0x0f 0x95. */
5805FNIEMOP_DEF(iemOp_setne_Eb)
5806{
5807 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5808 IEMOP_HLP_MIN_386();
5809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5810
5811 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5812 * any way. AMD says it's "unused", whatever that means. We're
5813 * ignoring it for now. */
5814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5815 {
5816 /* register target */
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_BEGIN(0, 0);
5819 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5820 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5821 } IEM_MC_ELSE() {
5822 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5823 } IEM_MC_ENDIF();
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 }
5827 else
5828 {
5829 /* memory target */
5830 IEM_MC_BEGIN(0, 1);
5831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5835 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5836 } IEM_MC_ELSE() {
5837 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5838 } IEM_MC_ENDIF();
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 }
5842 return VINF_SUCCESS;
5843}
5844
5845
5846/** Opcode 0x0f 0x96. */
5847FNIEMOP_DEF(iemOp_setbe_Eb)
5848{
5849 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5850 IEMOP_HLP_MIN_386();
5851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5852
5853 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5854 * any way. AMD says it's "unused", whatever that means. We're
5855 * ignoring it for now. */
5856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5857 {
5858 /* register target */
5859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5860 IEM_MC_BEGIN(0, 0);
5861 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5862 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5863 } IEM_MC_ELSE() {
5864 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5865 } IEM_MC_ENDIF();
5866 IEM_MC_ADVANCE_RIP();
5867 IEM_MC_END();
5868 }
5869 else
5870 {
5871 /* memory target */
5872 IEM_MC_BEGIN(0, 1);
5873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5877 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5878 } IEM_MC_ELSE() {
5879 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5880 } IEM_MC_ENDIF();
5881 IEM_MC_ADVANCE_RIP();
5882 IEM_MC_END();
5883 }
5884 return VINF_SUCCESS;
5885}
5886
5887
5888/** Opcode 0x0f 0x97. */
5889FNIEMOP_DEF(iemOp_setnbe_Eb)
5890{
5891 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5892 IEMOP_HLP_MIN_386();
5893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5894
5895 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5896 * any way. AMD says it's "unused", whatever that means. We're
5897 * ignoring it for now. */
5898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5899 {
5900 /* register target */
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_BEGIN(0, 0);
5903 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5904 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5905 } IEM_MC_ELSE() {
5906 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5907 } IEM_MC_ENDIF();
5908 IEM_MC_ADVANCE_RIP();
5909 IEM_MC_END();
5910 }
5911 else
5912 {
5913 /* memory target */
5914 IEM_MC_BEGIN(0, 1);
5915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5918 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5919 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5920 } IEM_MC_ELSE() {
5921 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5922 } IEM_MC_ENDIF();
5923 IEM_MC_ADVANCE_RIP();
5924 IEM_MC_END();
5925 }
5926 return VINF_SUCCESS;
5927}
5928
5929
5930/** Opcode 0x0f 0x98. */
5931FNIEMOP_DEF(iemOp_sets_Eb)
5932{
5933 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5934 IEMOP_HLP_MIN_386();
5935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5936
5937 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5938 * any way. AMD says it's "unused", whatever that means. We're
5939 * ignoring it for now. */
5940 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5941 {
5942 /* register target */
5943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5944 IEM_MC_BEGIN(0, 0);
5945 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5946 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5947 } IEM_MC_ELSE() {
5948 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5949 } IEM_MC_ENDIF();
5950 IEM_MC_ADVANCE_RIP();
5951 IEM_MC_END();
5952 }
5953 else
5954 {
5955 /* memory target */
5956 IEM_MC_BEGIN(0, 1);
5957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5960 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5961 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5962 } IEM_MC_ELSE() {
5963 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5964 } IEM_MC_ENDIF();
5965 IEM_MC_ADVANCE_RIP();
5966 IEM_MC_END();
5967 }
5968 return VINF_SUCCESS;
5969}
5970
5971
5972/** Opcode 0x0f 0x99. */
5973FNIEMOP_DEF(iemOp_setns_Eb)
5974{
5975 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5976 IEMOP_HLP_MIN_386();
5977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5978
5979 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5980 * any way. AMD says it's "unused", whatever that means. We're
5981 * ignoring it for now. */
5982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5983 {
5984 /* register target */
5985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5986 IEM_MC_BEGIN(0, 0);
5987 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5988 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5989 } IEM_MC_ELSE() {
5990 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5991 } IEM_MC_ENDIF();
5992 IEM_MC_ADVANCE_RIP();
5993 IEM_MC_END();
5994 }
5995 else
5996 {
5997 /* memory target */
5998 IEM_MC_BEGIN(0, 1);
5999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6002 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6003 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6004 } IEM_MC_ELSE() {
6005 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6006 } IEM_MC_ENDIF();
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 }
6010 return VINF_SUCCESS;
6011}
6012
6013
6014/** Opcode 0x0f 0x9a. */
6015FNIEMOP_DEF(iemOp_setp_Eb)
6016{
6017 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6018 IEMOP_HLP_MIN_386();
6019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6020
6021 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6022 * any way. AMD says it's "unused", whatever that means. We're
6023 * ignoring it for now. */
6024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6025 {
6026 /* register target */
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_BEGIN(0, 0);
6029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6030 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6031 } IEM_MC_ELSE() {
6032 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6033 } IEM_MC_ENDIF();
6034 IEM_MC_ADVANCE_RIP();
6035 IEM_MC_END();
6036 }
6037 else
6038 {
6039 /* memory target */
6040 IEM_MC_BEGIN(0, 1);
6041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6045 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6046 } IEM_MC_ELSE() {
6047 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6048 } IEM_MC_ENDIF();
6049 IEM_MC_ADVANCE_RIP();
6050 IEM_MC_END();
6051 }
6052 return VINF_SUCCESS;
6053}
6054
6055
6056/** Opcode 0x0f 0x9b. */
6057FNIEMOP_DEF(iemOp_setnp_Eb)
6058{
6059 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6060 IEMOP_HLP_MIN_386();
6061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6062
6063 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6064 * any way. AMD says it's "unused", whatever that means. We're
6065 * ignoring it for now. */
6066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6067 {
6068 /* register target */
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 IEM_MC_BEGIN(0, 0);
6071 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6072 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6073 } IEM_MC_ELSE() {
6074 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6075 } IEM_MC_ENDIF();
6076 IEM_MC_ADVANCE_RIP();
6077 IEM_MC_END();
6078 }
6079 else
6080 {
6081 /* memory target */
6082 IEM_MC_BEGIN(0, 1);
6083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6087 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6088 } IEM_MC_ELSE() {
6089 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6090 } IEM_MC_ENDIF();
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 }
6094 return VINF_SUCCESS;
6095}
6096
6097
6098/** Opcode 0x0f 0x9c. */
6099FNIEMOP_DEF(iemOp_setl_Eb)
6100{
6101 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6102 IEMOP_HLP_MIN_386();
6103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6104
6105 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6106 * any way. AMD says it's "unused", whatever that means. We're
6107 * ignoring it for now. */
6108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6109 {
6110 /* register target */
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_BEGIN(0, 0);
6113 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6114 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6115 } IEM_MC_ELSE() {
6116 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6117 } IEM_MC_ENDIF();
6118 IEM_MC_ADVANCE_RIP();
6119 IEM_MC_END();
6120 }
6121 else
6122 {
6123 /* memory target */
6124 IEM_MC_BEGIN(0, 1);
6125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6128 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6129 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6130 } IEM_MC_ELSE() {
6131 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6132 } IEM_MC_ENDIF();
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 }
6136 return VINF_SUCCESS;
6137}
6138
6139
6140/** Opcode 0x0f 0x9d. */
6141FNIEMOP_DEF(iemOp_setnl_Eb)
6142{
6143 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6144 IEMOP_HLP_MIN_386();
6145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6146
6147 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6148 * any way. AMD says it's "unused", whatever that means. We're
6149 * ignoring it for now. */
6150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6151 {
6152 /* register target */
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_BEGIN(0, 0);
6155 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6156 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6157 } IEM_MC_ELSE() {
6158 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6159 } IEM_MC_ENDIF();
6160 IEM_MC_ADVANCE_RIP();
6161 IEM_MC_END();
6162 }
6163 else
6164 {
6165 /* memory target */
6166 IEM_MC_BEGIN(0, 1);
6167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6170 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6171 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6172 } IEM_MC_ELSE() {
6173 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6174 } IEM_MC_ENDIF();
6175 IEM_MC_ADVANCE_RIP();
6176 IEM_MC_END();
6177 }
6178 return VINF_SUCCESS;
6179}
6180
6181
6182/** Opcode 0x0f 0x9e. */
6183FNIEMOP_DEF(iemOp_setle_Eb)
6184{
6185 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6186 IEMOP_HLP_MIN_386();
6187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6188
6189 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6190 * any way. AMD says it's "unused", whatever that means. We're
6191 * ignoring it for now. */
6192 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6193 {
6194 /* register target */
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6196 IEM_MC_BEGIN(0, 0);
6197 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6198 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6199 } IEM_MC_ELSE() {
6200 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6201 } IEM_MC_ENDIF();
6202 IEM_MC_ADVANCE_RIP();
6203 IEM_MC_END();
6204 }
6205 else
6206 {
6207 /* memory target */
6208 IEM_MC_BEGIN(0, 1);
6209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6213 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6214 } IEM_MC_ELSE() {
6215 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6216 } IEM_MC_ENDIF();
6217 IEM_MC_ADVANCE_RIP();
6218 IEM_MC_END();
6219 }
6220 return VINF_SUCCESS;
6221}
6222
6223
6224/** Opcode 0x0f 0x9f. */
6225FNIEMOP_DEF(iemOp_setnle_Eb)
6226{
6227 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6228 IEMOP_HLP_MIN_386();
6229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6230
6231 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6232 * any way. AMD says it's "unused", whatever that means. We're
6233 * ignoring it for now. */
6234 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6235 {
6236 /* register target */
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 IEM_MC_BEGIN(0, 0);
6239 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6240 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6241 } IEM_MC_ELSE() {
6242 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6243 } IEM_MC_ENDIF();
6244 IEM_MC_ADVANCE_RIP();
6245 IEM_MC_END();
6246 }
6247 else
6248 {
6249 /* memory target */
6250 IEM_MC_BEGIN(0, 1);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6254 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6255 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6256 } IEM_MC_ELSE() {
6257 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6258 } IEM_MC_ENDIF();
6259 IEM_MC_ADVANCE_RIP();
6260 IEM_MC_END();
6261 }
6262 return VINF_SUCCESS;
6263}
6264
6265
6266/**
6267 * Common 'push segment-register' helper.
6268 */
6269FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6270{
6271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6272 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only the FS/GS pushes are valid in 64-bit mode. */
6273 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6274
6275 switch (pVCpu->iem.s.enmEffOpSize)
6276 {
6277 case IEMMODE_16BIT:
6278 IEM_MC_BEGIN(0, 1);
6279 IEM_MC_LOCAL(uint16_t, u16Value);
6280 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6281 IEM_MC_PUSH_U16(u16Value);
6282 IEM_MC_ADVANCE_RIP();
6283 IEM_MC_END();
6284 break;
6285
6286 case IEMMODE_32BIT:
6287 IEM_MC_BEGIN(0, 1);
6288 IEM_MC_LOCAL(uint32_t, u32Value);
6289 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
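            /* IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32: with a 32-bit
               operand size real CPUs commonly write the selector as a 16-bit
               store, leaving the upper half of the stack slot untouched; the
               dedicated helper lets the emulation mimic that behaviour. */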
6290 IEM_MC_PUSH_U32_SREG(u32Value);
6291 IEM_MC_ADVANCE_RIP();
6292 IEM_MC_END();
6293 break;
6294
6295 case IEMMODE_64BIT:
6296 IEM_MC_BEGIN(0, 1);
6297 IEM_MC_LOCAL(uint64_t, u64Value);
6298 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6299 IEM_MC_PUSH_U64(u64Value);
6300 IEM_MC_ADVANCE_RIP();
6301 IEM_MC_END();
6302 break;
6303 }
6304
6305 return VINF_SUCCESS;
6306}
6307
6308
6309/** Opcode 0x0f 0xa0. */
6310FNIEMOP_DEF(iemOp_push_fs)
6311{
6312 IEMOP_MNEMONIC(push_fs, "push fs");
6313 IEMOP_HLP_MIN_386();
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6315 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6316}
6317
6318
6319/** Opcode 0x0f 0xa1. */
6320FNIEMOP_DEF(iemOp_pop_fs)
6321{
6322 IEMOP_MNEMONIC(pop_fs, "pop fs");
6323 IEMOP_HLP_MIN_386();
6324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6325 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6326}
6327
6328
6329/** Opcode 0x0f 0xa2. */
6330FNIEMOP_DEF(iemOp_cpuid)
6331{
6332 IEMOP_MNEMONIC(cpuid, "cpuid");
6333 IEMOP_HLP_MIN_486(); /* Not on all 486s; software probes for CPUID support via the EFLAGS.ID bit. */
6334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6335 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6336}
6337
6338
6339/**
6340 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6341 * iemOp_bts_Ev_Gv.
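 *
 * For register destinations the bit offset in Gv is masked to the operand
 * width; for memory destinations it is a signed index into a bit string and
 * gets folded into the effective address (see the address adjustment below).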
6342 */
6343FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6344{
6345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6347
6348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6349 {
6350 /* register destination. */
6351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6352 switch (pVCpu->iem.s.enmEffOpSize)
6353 {
6354 case IEMMODE_16BIT:
6355 IEM_MC_BEGIN(3, 0);
6356 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6357 IEM_MC_ARG(uint16_t, u16Src, 1);
6358 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6359
6360 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6361 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6362 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6363 IEM_MC_REF_EFLAGS(pEFlags);
6364 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6365
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_32BIT:
6371 IEM_MC_BEGIN(3, 0);
6372 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6373 IEM_MC_ARG(uint32_t, u32Src, 1);
6374 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6375
6376 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6377 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6378 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6379 IEM_MC_REF_EFLAGS(pEFlags);
6380 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6381
6382 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6383 IEM_MC_ADVANCE_RIP();
6384 IEM_MC_END();
6385 return VINF_SUCCESS;
6386
6387 case IEMMODE_64BIT:
6388 IEM_MC_BEGIN(3, 0);
6389 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6390 IEM_MC_ARG(uint64_t, u64Src, 1);
6391 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6392
6393 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6394 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6395 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6396 IEM_MC_REF_EFLAGS(pEFlags);
6397 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6398
6399 IEM_MC_ADVANCE_RIP();
6400 IEM_MC_END();
6401 return VINF_SUCCESS;
6402
6403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6404 }
6405 }
6406 else
6407 {
6408 /* memory destination. */
6409
6410 uint32_t fAccess;
6411 if (pImpl->pfnLockedU16)
6412 fAccess = IEM_ACCESS_DATA_RW;
6413 else /* BT */
6414 fAccess = IEM_ACCESS_DATA_R;
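        /* Only the modifying forms (BTS/BTR/BTC) supply a locked worker; for
           plain BT the operand is mapped read-only and the decoding below
           rejects any lock prefix. */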
6415
6416 /** @todo test negative bit offsets! */
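        /* The ASSIGN/AND/SAR/SHL sequences below implement bit-string
           addressing: advance the address by (bit offset / operand bits) *
           operand bytes (SAR by 4/5/6, then SHL by 1/2/3) and keep only the
           bit number within the addressed word/dword/qword (AND by 15/31/63).
           E.g. a 16-bit BT with a bit offset of 100 accesses the word at
           EffAddr + 12 and tests bit 4. The arithmetic shift lets negative
           offsets address below the base. */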
6417 switch (pVCpu->iem.s.enmEffOpSize)
6418 {
6419 case IEMMODE_16BIT:
6420 IEM_MC_BEGIN(3, 2);
6421 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6422 IEM_MC_ARG(uint16_t, u16Src, 1);
6423 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6425 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6426
6427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6428 if (pImpl->pfnLockedU16)
6429 IEMOP_HLP_DONE_DECODING();
6430 else
6431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6432 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6433 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6434 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6435 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6436 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6437 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6438 IEM_MC_FETCH_EFLAGS(EFlags);
6439
6440 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6441 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6442 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6443 else
6444 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6446
6447 IEM_MC_COMMIT_EFLAGS(EFlags);
6448 IEM_MC_ADVANCE_RIP();
6449 IEM_MC_END();
6450 return VINF_SUCCESS;
6451
6452 case IEMMODE_32BIT:
6453 IEM_MC_BEGIN(3, 2);
6454 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6455 IEM_MC_ARG(uint32_t, u32Src, 1);
6456 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6458 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6459
6460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6461 if (pImpl->pfnLockedU16)
6462 IEMOP_HLP_DONE_DECODING();
6463 else
6464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6465 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6466 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6467 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6468 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6469 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6470 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6471 IEM_MC_FETCH_EFLAGS(EFlags);
6472
6473 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6474 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6475 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6476 else
6477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6478 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6479
6480 IEM_MC_COMMIT_EFLAGS(EFlags);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 return VINF_SUCCESS;
6484
6485 case IEMMODE_64BIT:
6486 IEM_MC_BEGIN(3, 2);
6487 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6488 IEM_MC_ARG(uint64_t, u64Src, 1);
6489 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6491 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6492
6493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6494 if (pImpl->pfnLockedU16)
6495 IEMOP_HLP_DONE_DECODING();
6496 else
6497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6498 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6499 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6500 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6501 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6502 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6503 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6504 IEM_MC_FETCH_EFLAGS(EFlags);
6505
6506 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6507 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6508 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6509 else
6510 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6511 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6512
6513 IEM_MC_COMMIT_EFLAGS(EFlags);
6514 IEM_MC_ADVANCE_RIP();
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517
6518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6519 }
6520 }
6521}
6522
6523
6524/** Opcode 0x0f 0xa3. */
6525FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6526{
6527 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6528 IEMOP_HLP_MIN_386();
6529 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6530}
6531
6532
6533/**
6534 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
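 *
 * Note: the memory forms pass cbImm=1 to IEM_MC_CALC_RM_EFF_ADDR so the imm8
 * that still follows the ModRM bytes is accounted for (RIP-relative
 * addressing).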
6535 */
6536FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6537{
6538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6539 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6540
6541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6542 {
6543 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545
6546 switch (pVCpu->iem.s.enmEffOpSize)
6547 {
6548 case IEMMODE_16BIT:
6549 IEM_MC_BEGIN(4, 0);
6550 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6551 IEM_MC_ARG(uint16_t, u16Src, 1);
6552 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6553 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6554
6555 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6556 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6557 IEM_MC_REF_EFLAGS(pEFlags);
6558 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6559
6560 IEM_MC_ADVANCE_RIP();
6561 IEM_MC_END();
6562 return VINF_SUCCESS;
6563
6564 case IEMMODE_32BIT:
6565 IEM_MC_BEGIN(4, 0);
6566 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6567 IEM_MC_ARG(uint32_t, u32Src, 1);
6568 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6569 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6570
6571 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6572 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6573 IEM_MC_REF_EFLAGS(pEFlags);
6574 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6575
6576 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6577 IEM_MC_ADVANCE_RIP();
6578 IEM_MC_END();
6579 return VINF_SUCCESS;
6580
6581 case IEMMODE_64BIT:
6582 IEM_MC_BEGIN(4, 0);
6583 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6584 IEM_MC_ARG(uint64_t, u64Src, 1);
6585 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6586 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6587
6588 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6589 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6590 IEM_MC_REF_EFLAGS(pEFlags);
6591 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6592
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596
6597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6598 }
6599 }
6600 else
6601 {
6602 switch (pVCpu->iem.s.enmEffOpSize)
6603 {
6604 case IEMMODE_16BIT:
6605 IEM_MC_BEGIN(4, 2);
6606 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6607 IEM_MC_ARG(uint16_t, u16Src, 1);
6608 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6609 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6611
6612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6613 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6614 IEM_MC_ASSIGN(cShiftArg, cShift);
6615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6616 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6617 IEM_MC_FETCH_EFLAGS(EFlags);
6618 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6619 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6620
6621 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6622 IEM_MC_COMMIT_EFLAGS(EFlags);
6623 IEM_MC_ADVANCE_RIP();
6624 IEM_MC_END();
6625 return VINF_SUCCESS;
6626
6627 case IEMMODE_32BIT:
6628 IEM_MC_BEGIN(4, 2);
6629 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6630 IEM_MC_ARG(uint32_t, u32Src, 1);
6631 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6632 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6634
6635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6636 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6637 IEM_MC_ASSIGN(cShiftArg, cShift);
6638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6639 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6640 IEM_MC_FETCH_EFLAGS(EFlags);
6641 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6642 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6643
6644 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6645 IEM_MC_COMMIT_EFLAGS(EFlags);
6646 IEM_MC_ADVANCE_RIP();
6647 IEM_MC_END();
6648 return VINF_SUCCESS;
6649
6650 case IEMMODE_64BIT:
6651 IEM_MC_BEGIN(4, 2);
6652 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6653 IEM_MC_ARG(uint64_t, u64Src, 1);
6654 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6655 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6657
6658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6659 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6660 IEM_MC_ASSIGN(cShiftArg, cShift);
6661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6662 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6663 IEM_MC_FETCH_EFLAGS(EFlags);
6664 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6665 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6666
6667 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6668 IEM_MC_COMMIT_EFLAGS(EFlags);
6669 IEM_MC_ADVANCE_RIP();
6670 IEM_MC_END();
6671 return VINF_SUCCESS;
6672
6673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6674 }
6675 }
6676}
6677
6678
6679/**
6680 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
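 *
 * The shift count is fetched from CL; masking it to the operand width (mod 32,
 * or mod 64 for 64-bit operands) is left to the pfnNormalUxx workers.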
6681 */
6682FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6683{
6684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6685 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6686
6687 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6688 {
6689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6690
6691 switch (pVCpu->iem.s.enmEffOpSize)
6692 {
6693 case IEMMODE_16BIT:
6694 IEM_MC_BEGIN(4, 0);
6695 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6696 IEM_MC_ARG(uint16_t, u16Src, 1);
6697 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6698 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6699
6700 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6701 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6702 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6703 IEM_MC_REF_EFLAGS(pEFlags);
6704 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6705
6706 IEM_MC_ADVANCE_RIP();
6707 IEM_MC_END();
6708 return VINF_SUCCESS;
6709
6710 case IEMMODE_32BIT:
6711 IEM_MC_BEGIN(4, 0);
6712 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6713 IEM_MC_ARG(uint32_t, u32Src, 1);
6714 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6715 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6716
6717 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6718 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6719 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6720 IEM_MC_REF_EFLAGS(pEFlags);
6721 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6722
6723 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6724 IEM_MC_ADVANCE_RIP();
6725 IEM_MC_END();
6726 return VINF_SUCCESS;
6727
6728 case IEMMODE_64BIT:
6729 IEM_MC_BEGIN(4, 0);
6730 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6731 IEM_MC_ARG(uint64_t, u64Src, 1);
6732 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6733 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6734
6735 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6736 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6737 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6738 IEM_MC_REF_EFLAGS(pEFlags);
6739 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6740
6741 IEM_MC_ADVANCE_RIP();
6742 IEM_MC_END();
6743 return VINF_SUCCESS;
6744
6745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6746 }
6747 }
6748 else
6749 {
6750 switch (pVCpu->iem.s.enmEffOpSize)
6751 {
6752 case IEMMODE_16BIT:
6753 IEM_MC_BEGIN(4, 2);
6754 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6755 IEM_MC_ARG(uint16_t, u16Src, 1);
6756 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6757 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6759
6760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6762 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6763 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6764 IEM_MC_FETCH_EFLAGS(EFlags);
6765 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6766 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6767
6768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6769 IEM_MC_COMMIT_EFLAGS(EFlags);
6770 IEM_MC_ADVANCE_RIP();
6771 IEM_MC_END();
6772 return VINF_SUCCESS;
6773
6774 case IEMMODE_32BIT:
6775 IEM_MC_BEGIN(4, 2);
6776 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6777 IEM_MC_ARG(uint32_t, u32Src, 1);
6778 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6781
6782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6784 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6785 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6786 IEM_MC_FETCH_EFLAGS(EFlags);
6787 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6788 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6789
6790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6791 IEM_MC_COMMIT_EFLAGS(EFlags);
6792 IEM_MC_ADVANCE_RIP();
6793 IEM_MC_END();
6794 return VINF_SUCCESS;
6795
6796 case IEMMODE_64BIT:
6797 IEM_MC_BEGIN(4, 2);
6798 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6799 IEM_MC_ARG(uint64_t, u64Src, 1);
6800 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6801 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6803
6804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6806 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6807 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6808 IEM_MC_FETCH_EFLAGS(EFlags);
6809 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6810 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6811
6812 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6813 IEM_MC_COMMIT_EFLAGS(EFlags);
6814 IEM_MC_ADVANCE_RIP();
6815 IEM_MC_END();
6816 return VINF_SUCCESS;
6817
6818 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6819 }
6820 }
6821}
6822
6823
6824
6825/** Opcode 0x0f 0xa4. */
6826FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6827{
6828 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6829 IEMOP_HLP_MIN_386();
6830 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
6831}
6832
6833
6834/** Opcode 0x0f 0xa5. */
6835FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6836{
6837 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6838 IEMOP_HLP_MIN_386();
6839 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
6840}
6841
6842
6843/** Opcode 0x0f 0xa8. */
6844FNIEMOP_DEF(iemOp_push_gs)
6845{
6846 IEMOP_MNEMONIC(push_gs, "push gs");
6847 IEMOP_HLP_MIN_386();
6848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6849 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6850}
6851
6852
6853/** Opcode 0x0f 0xa9. */
6854FNIEMOP_DEF(iemOp_pop_gs)
6855{
6856 IEMOP_MNEMONIC(pop_gs, "pop gs");
6857 IEMOP_HLP_MIN_386();
6858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6859 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6860}
6861
6862
6863/** Opcode 0x0f 0xaa. */
6864FNIEMOP_DEF(iemOp_rsm)
6865{
6866 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6867 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6869 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6870}
6871
6872
6873
6874/** Opcode 0x0f 0xab. */
6875FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6876{
6877 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6878 IEMOP_HLP_MIN_386();
6879 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6880}
6881
6882
6883/** Opcode 0x0f 0xac. */
6884FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6885{
6886 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6887 IEMOP_HLP_MIN_386();
6888 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
6889}
6890
6891
6892/** Opcode 0x0f 0xad. */
6893FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6894{
6895 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6896 IEMOP_HLP_MIN_386();
6897 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
6898}
6899
6900
6901/** Opcode 0x0f 0xae mem/0. */
6902FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6903{
6904 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6905 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6906 return IEMOP_RAISE_INVALID_OPCODE();
6907
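    /* Only decoding is done here; the remaining checks (CR0.TS/CR0.EM, the
       16-byte alignment #GP) and the actual 512-byte store are left to
       iemCImpl_fxsave. */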
6908 IEM_MC_BEGIN(3, 1);
6909 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6910 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6911 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6914 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6915 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6916 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6917 IEM_MC_END();
6918 return VINF_SUCCESS;
6919}
6920
6921
6922/** Opcode 0x0f 0xae mem/1. */
6923FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6924{
6925 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6926 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6927 return IEMOP_RAISE_INVALID_OPCODE();
6928
6929 IEM_MC_BEGIN(3, 1);
6930 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6931 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6932 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6935 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6936 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6937 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6938 IEM_MC_END();
6939 return VINF_SUCCESS;
6940}
6941
6942
6943/**
6944 * @opmaps grp15
6945 * @opcode !11/2
6946 * @oppfx none
6947 * @opcpuid sse
6948 * @opgroup og_sse_mxcsrsm
6949 * @opxcpttype 5
6950 * @optest op1=0 -> mxcsr=0
6951 * @optest op1=0x2083 -> mxcsr=0x2083
6952 * @optest op1=0xfffffffe -> value.xcpt=0xd
6953 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6954 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6955 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6956 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6957 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6958 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6959 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6960 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6961 */
6962FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6963{
6964 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6965 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6966 return IEMOP_RAISE_INVALID_OPCODE();
6967
6968 IEM_MC_BEGIN(2, 0);
6969 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6970 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6973 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6975 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6976 IEM_MC_END();
6977 return VINF_SUCCESS;
6978}
6979
6980
6981/**
6982 * @opmaps grp15
6983 * @opcode !11/3
6984 * @oppfx none
6985 * @opcpuid sse
6986 * @opgroup og_sse_mxcsrsm
6987 * @opxcpttype 5
6988 * @optest mxcsr=0 -> op1=0
6989 * @optest mxcsr=0x2083 -> op1=0x2083
6990 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6991 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6992 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6993 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6994 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6995 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6996 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6997 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6998 */
6999FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7000{
7001 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7002 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7003 return IEMOP_RAISE_INVALID_OPCODE();
7004
7005 IEM_MC_BEGIN(2, 0);
7006 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7007 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7011 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7012 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7013 IEM_MC_END();
7014 return VINF_SUCCESS;
7015}
7016
7017
7018/**
7019 * @opmaps grp15
7020 * @opcode !11/4
7021 * @oppfx none
7022 * @opcpuid xsave
7023 * @opgroup og_system
7024 * @opxcpttype none
7025 */
7026FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7027{
7028 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7029 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7030 return IEMOP_RAISE_INVALID_OPCODE();
7031
7032 IEM_MC_BEGIN(3, 0);
7033 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7034 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7035 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7038 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7039 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7040 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7041 IEM_MC_END();
7042 return VINF_SUCCESS;
7043}
7044
7045
7046/**
7047 * @opmaps grp15
7048 * @opcode !11/5
7049 * @oppfx none
7050 * @opcpuid xsave
7051 * @opgroup og_system
7052 * @opxcpttype none
7053 */
7054FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7055{
7056 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7057 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7058 return IEMOP_RAISE_INVALID_OPCODE();
7059
7060 IEM_MC_BEGIN(3, 0);
7061 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7062 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7063 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7067 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7068 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7069 IEM_MC_END();
7070 return VINF_SUCCESS;
7071}
7072
7073/** Opcode 0x0f 0xae mem/6. */
7074FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7075
7076/**
7077 * @opmaps grp15
7078 * @opcode !11/7
7079 * @oppfx none
7080 * @opcpuid clfsh
7081 * @opgroup og_cachectl
7082 * @optest op1=1 ->
7083 */
7084FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7085{
7086 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
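    /* Without CLFLUSH support the encoding is invalid, but the ModRM operand
       bytes must still be parsed, hence iemOp_InvalidWithRMAllNeeded rather
       than raising #UD directly. */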
7087 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7088 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7089
7090 IEM_MC_BEGIN(2, 0);
7091 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7092 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7095 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7096 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7097 IEM_MC_END();
7098 return VINF_SUCCESS;
7099}
7100
7101/**
7102 * @opmaps grp15
7103 * @opcode !11/7
7104 * @oppfx 0x66
7105 * @opcpuid clflushopt
7106 * @opgroup og_cachectl
7107 * @optest op1=1 ->
7108 */
7109FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7110{
7111 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7112 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7113 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7114
7115 IEM_MC_BEGIN(2, 0);
7116 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7117 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7120 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7121 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7122 IEM_MC_END();
7123 return VINF_SUCCESS;
7124}
7125
7126
7127/** Opcode 0x0f 0xae 11b/5. */
7128FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7129{
7130 RT_NOREF_PV(bRm);
7131 IEMOP_MNEMONIC(lfence, "lfence");
7132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7133 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7134 return IEMOP_RAISE_INVALID_OPCODE();
7135
7136 IEM_MC_BEGIN(0, 0);
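    /* Only use the real LFENCE when the x86 host actually has SSE2, otherwise
       fall back to a generic memory fence; ARM64 builds compile the check out
       and always use iemAImpl_lfence. */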
7137#ifndef RT_ARCH_ARM64
7138 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7139#endif
7140 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7141#ifndef RT_ARCH_ARM64
7142 else
7143 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7144#endif
7145 IEM_MC_ADVANCE_RIP();
7146 IEM_MC_END();
7147 return VINF_SUCCESS;
7148}
7149
7150
7151/** Opcode 0x0f 0xae 11b/6. */
7152FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7153{
7154 RT_NOREF_PV(bRm);
7155 IEMOP_MNEMONIC(mfence, "mfence");
7156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7157 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7158 return IEMOP_RAISE_INVALID_OPCODE();
7159
7160 IEM_MC_BEGIN(0, 0);
7161#ifndef RT_ARCH_ARM64
7162 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7163#endif
7164 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7165#ifndef RT_ARCH_ARM64
7166 else
7167 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7168#endif
7169 IEM_MC_ADVANCE_RIP();
7170 IEM_MC_END();
7171 return VINF_SUCCESS;
7172}
7173
7174
7175/** Opcode 0x0f 0xae 11b/7. */
7176FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7177{
7178 RT_NOREF_PV(bRm);
7179 IEMOP_MNEMONIC(sfence, "sfence");
7180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7181 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7182 return IEMOP_RAISE_INVALID_OPCODE();
7183
7184 IEM_MC_BEGIN(0, 0);
7185#ifndef RT_ARCH_ARM64
7186 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7187#endif
7188 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7189#ifndef RT_ARCH_ARM64
7190 else
7191 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7192#endif
7193 IEM_MC_ADVANCE_RIP();
7194 IEM_MC_END();
7195 return VINF_SUCCESS;
7196}
7197
7198
7199/** Opcode 0xf3 0x0f 0xae 11b/0. */
7200FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7201{
7202 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7204 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7205 {
7206 IEM_MC_BEGIN(1, 0);
7207 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7208 IEM_MC_ARG(uint64_t, u64Dst, 0);
7209 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7210 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7211 IEM_MC_ADVANCE_RIP();
7212 IEM_MC_END();
7213 }
7214 else
7215 {
7216 IEM_MC_BEGIN(1, 0);
7217 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7218 IEM_MC_ARG(uint32_t, u32Dst, 0);
7219 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7220 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7221 IEM_MC_ADVANCE_RIP();
7222 IEM_MC_END();
7223 }
7224 return VINF_SUCCESS;
7225}
7226
7227
7228/** Opcode 0xf3 0x0f 0xae 11b/1. */
7229FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7230{
7231 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7233 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7234 {
7235 IEM_MC_BEGIN(1, 0);
7236 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7237 IEM_MC_ARG(uint64_t, u64Dst, 0);
7238 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7239 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7240 IEM_MC_ADVANCE_RIP();
7241 IEM_MC_END();
7242 }
7243 else
7244 {
7245 IEM_MC_BEGIN(1, 0);
7246 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7247 IEM_MC_ARG(uint32_t, u32Dst, 0);
7248 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7249 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7250 IEM_MC_ADVANCE_RIP();
7251 IEM_MC_END();
7252 }
7253 return VINF_SUCCESS;
7254}
7255
7256
7257/** Opcode 0xf3 0x0f 0xae 11b/2. */
7258FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7259{
7260 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7263 {
7264 IEM_MC_BEGIN(1, 0);
7265 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7266 IEM_MC_ARG(uint64_t, u64Dst, 0);
7267 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7268 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7269 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7270 IEM_MC_ADVANCE_RIP();
7271 IEM_MC_END();
7272 }
7273 else
7274 {
7275 IEM_MC_BEGIN(1, 0);
7276 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7277 IEM_MC_ARG(uint32_t, u32Dst, 0);
7278 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7279 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 }
7283 return VINF_SUCCESS;
7284}
7285
7286
7287/** Opcode 0xf3 0x0f 0xae 11b/3. */
7288FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7289{
7290 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7292 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7293 {
7294 IEM_MC_BEGIN(1, 0);
7295 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7296 IEM_MC_ARG(uint64_t, u64Dst, 0);
7297 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7298 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7299 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7300 IEM_MC_ADVANCE_RIP();
7301 IEM_MC_END();
7302 }
7303 else
7304 {
7305 IEM_MC_BEGIN(1, 0);
7306 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7307 IEM_MC_ARG(uint32_t, u32Dst, 0);
7308 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7309 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7310 IEM_MC_ADVANCE_RIP();
7311 IEM_MC_END();
7312 }
7313 return VINF_SUCCESS;
7314}
7315
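/*
 * Note: only the 64-bit WRFSBASE/WRGSBASE paths above need the non-canonical
 * address check; a zero-extended 32-bit value can never be non-canonical.
 * A sketch of the usual 48-bit canonical test (illustrative helper, not the
 * IEM macro):
 *
 *     static bool IsCanonicalU64(uint64_t uAddr)
 *     {
 *         return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr; // bits 63:48 must replicate bit 47
 *     }
 */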
7316
7317/**
7318 * Group 15 jump table for register variant.
7319 */
7320IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7321{ /* pfx: none, 066h, 0f3h, 0f2h */
7322 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7323 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7324 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7325 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7326 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7327 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7328 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7329 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7330};
7331AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7332
7333
7334/**
7335 * Group 15 jump table for memory variant.
7336 */
7337IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7338{ /* pfx: none, 066h, 0f3h, 0f2h */
7339 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7340 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7341 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7342 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7343 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7344 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7345 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7346 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7347};
7348AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7349
7350
7351/** Opcode 0x0f 0xae. */
7352FNIEMOP_DEF(iemOp_Grp15)
7353{
7354 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
7355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7356 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7357 /* register, register */
7358 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7359 + pVCpu->iem.s.idxPrefix], bRm);
7360 /* memory, register */
7361 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7362 + pVCpu->iem.s.idxPrefix], bRm);
7363}
7364
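/*
 * Dispatch example: sfence is encoded 0F AE F8, i.e. mod=11b, reg=7, rm=0
 * with no prefix, so the register table is indexed with 7*4 + 0 and lands
 * on iemOp_Grp15_sfence; clflushopt (66 0F AE /7 with a memory operand)
 * indexes the memory table with 7*4 + 1. The idxPrefix values 0..3
 * correspond to the none/066h/0f3h/0f2h columns commented in the tables
 * above.
 */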
7365
7366/** Opcode 0x0f 0xaf. */
7367FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7368{
7369 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7370 IEMOP_HLP_MIN_386();
7371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7372 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
7373}
7374
7375
7376/** Opcode 0x0f 0xb0. */
7377FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7378{
7379 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7380 IEMOP_HLP_MIN_486();
7381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7382
7383 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7384 {
7385 IEMOP_HLP_DONE_DECODING();
7386 IEM_MC_BEGIN(4, 0);
7387 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7388 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7389 IEM_MC_ARG(uint8_t, u8Src, 2);
7390 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7391
7392 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7393 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7394 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7395 IEM_MC_REF_EFLAGS(pEFlags);
7396 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7397 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7398 else
7399 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7400
7401 IEM_MC_ADVANCE_RIP();
7402 IEM_MC_END();
7403 }
7404 else
7405 {
7406 IEM_MC_BEGIN(4, 3);
7407 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7408 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7409 IEM_MC_ARG(uint8_t, u8Src, 2);
7410 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7412 IEM_MC_LOCAL(uint8_t, u8Al);
7413
7414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7415 IEMOP_HLP_DONE_DECODING();
7416 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7417 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7418 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7419 IEM_MC_FETCH_EFLAGS(EFlags);
7420 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7421 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7422 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7423 else
7424 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7425
7426 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7427 IEM_MC_COMMIT_EFLAGS(EFlags);
7428 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7429 IEM_MC_ADVANCE_RIP();
7430 IEM_MC_END();
7431 }
7432 return VINF_SUCCESS;
7433}
7434
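/*
 * Reference semantics of CMPXCHG r/m8,r8 (a sketch; the real work, including
 * full EFLAGS updating, is done by the iemAImpl_cmpxchg_u8* helpers):
 *
 *     if (*pu8Al == *pu8Dst)
 *     {
 *         fEFlags |= X86_EFL_ZF;  // arithmetic flags are set as for CMP AL, r/m8
 *         *pu8Dst  = u8Src;
 *     }
 *     else
 *     {
 *         fEFlags &= ~X86_EFL_ZF;
 *         *pu8Al   = *pu8Dst;     // the accumulator receives the current destination
 *     }
 */
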
7435/** Opcode 0x0f 0xb1. */
7436FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7437{
7438 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7439 IEMOP_HLP_MIN_486();
7440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7441
7442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7443 {
7444 IEMOP_HLP_DONE_DECODING();
7445 switch (pVCpu->iem.s.enmEffOpSize)
7446 {
7447 case IEMMODE_16BIT:
7448 IEM_MC_BEGIN(4, 0);
7449 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7450 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7451 IEM_MC_ARG(uint16_t, u16Src, 2);
7452 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7453
7454 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7455 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7456 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7457 IEM_MC_REF_EFLAGS(pEFlags);
7458 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7459 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7460 else
7461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7462
7463 IEM_MC_ADVANCE_RIP();
7464 IEM_MC_END();
7465 return VINF_SUCCESS;
7466
7467 case IEMMODE_32BIT:
7468 IEM_MC_BEGIN(4, 0);
7469 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7470 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7471 IEM_MC_ARG(uint32_t, u32Src, 2);
7472 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7473
7474 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7475 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7476 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7477 IEM_MC_REF_EFLAGS(pEFlags);
7478 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7479 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7480 else
7481 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7482
7483 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7484 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7485 IEM_MC_ADVANCE_RIP();
7486 IEM_MC_END();
7487 return VINF_SUCCESS;
7488
7489 case IEMMODE_64BIT:
7490 IEM_MC_BEGIN(4, 0);
7491 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7492 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7493#ifdef RT_ARCH_X86
7494 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7495#else
7496 IEM_MC_ARG(uint64_t, u64Src, 2);
7497#endif
7498 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7499
7500 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7501 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7502 IEM_MC_REF_EFLAGS(pEFlags);
7503#ifdef RT_ARCH_X86
7504 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7505 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7507 else
7508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7509#else
7510 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7511 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7512 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7513 else
7514 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7515#endif
7516
7517 IEM_MC_ADVANCE_RIP();
7518 IEM_MC_END();
7519 return VINF_SUCCESS;
7520
7521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7522 }
7523 }
7524 else
7525 {
7526 switch (pVCpu->iem.s.enmEffOpSize)
7527 {
7528 case IEMMODE_16BIT:
7529 IEM_MC_BEGIN(4, 3);
7530 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7531 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7532 IEM_MC_ARG(uint16_t, u16Src, 2);
7533 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7535 IEM_MC_LOCAL(uint16_t, u16Ax);
7536
7537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7538 IEMOP_HLP_DONE_DECODING();
7539 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7540 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7541 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7542 IEM_MC_FETCH_EFLAGS(EFlags);
7543 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7544 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7545 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7546 else
7547 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7548
7549 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7550 IEM_MC_COMMIT_EFLAGS(EFlags);
7551 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7552 IEM_MC_ADVANCE_RIP();
7553 IEM_MC_END();
7554 return VINF_SUCCESS;
7555
7556 case IEMMODE_32BIT:
7557 IEM_MC_BEGIN(4, 3);
7558 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7559 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7560 IEM_MC_ARG(uint32_t, u32Src, 2);
7561 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7563 IEM_MC_LOCAL(uint32_t, u32Eax);
7564
7565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7566 IEMOP_HLP_DONE_DECODING();
7567 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7568 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7569 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7570 IEM_MC_FETCH_EFLAGS(EFlags);
7571 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7572 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7573 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7574 else
7575 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7576
7577 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7578 IEM_MC_COMMIT_EFLAGS(EFlags);
7579 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7580 IEM_MC_ADVANCE_RIP();
7581 IEM_MC_END();
7582 return VINF_SUCCESS;
7583
7584 case IEMMODE_64BIT:
7585 IEM_MC_BEGIN(4, 3);
7586 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7587 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7588#ifdef RT_ARCH_X86
7589 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7590#else
7591 IEM_MC_ARG(uint64_t, u64Src, 2);
7592#endif
7593 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7595 IEM_MC_LOCAL(uint64_t, u64Rax);
7596
7597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7598 IEMOP_HLP_DONE_DECODING();
7599 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7600 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7601 IEM_MC_FETCH_EFLAGS(EFlags);
7602 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7603#ifdef RT_ARCH_X86
7604 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7605 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7606 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7607 else
7608 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7609#else
7610 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7611 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7612 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7613 else
7614 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7615#endif
7616
7617 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7618 IEM_MC_COMMIT_EFLAGS(EFlags);
7619 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7620 IEM_MC_ADVANCE_RIP();
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623
7624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7625 }
7626 }
7627}
7628
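/*
 * Note on the RT_ARCH_X86 special casing above: on 32-bit hosts a 64-bit
 * source operand is awkward to pass by value to the assembly helper, so the
 * x86 build passes it by reference instead; 64-bit hosts pass the value
 * directly. (This is the apparent build-time reason for the two helper
 * signatures, as far as can be told from this file.)
 */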
7629
7630FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7631{
7632 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7633 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7634
7635 switch (pVCpu->iem.s.enmEffOpSize)
7636 {
7637 case IEMMODE_16BIT:
7638 IEM_MC_BEGIN(5, 1);
7639 IEM_MC_ARG(uint16_t, uSel, 0);
7640 IEM_MC_ARG(uint16_t, offSeg, 1);
7641 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7642 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7643 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7644 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7647 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7648 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7649 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7650 IEM_MC_END();
7651 return VINF_SUCCESS;
7652
7653 case IEMMODE_32BIT:
7654 IEM_MC_BEGIN(5, 1);
7655 IEM_MC_ARG(uint16_t, uSel, 0);
7656 IEM_MC_ARG(uint32_t, offSeg, 1);
7657 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7658 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7659 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7660 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7663 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7664 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7665 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7666 IEM_MC_END();
7667 return VINF_SUCCESS;
7668
7669 case IEMMODE_64BIT:
7670 IEM_MC_BEGIN(5, 1);
7671 IEM_MC_ARG(uint16_t, uSel, 0);
7672 IEM_MC_ARG(uint64_t, offSeg, 1);
7673 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7674 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7675 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7676 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7680 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7681 else
7682 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7683 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7684 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7685 IEM_MC_END();
7686 return VINF_SUCCESS;
7687
7688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7689 }
7690}
7691
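/*
 * Memory layout note for the Mp (far pointer) operand: the offset comes
 * first and the selector last, which is why the selector word is fetched at
 * displacement 2, 4 or 8 above. E.g. for a 32-bit 'lss eax, [mem]':
 *
 *     mem+0: 32-bit offset   -> eax
 *     mem+4: 16-bit selector -> ss
 */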
7692
7693/** Opcode 0x0f 0xb2. */
7694FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7695{
7696 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7697 IEMOP_HLP_MIN_386();
7698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7699 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7700 return IEMOP_RAISE_INVALID_OPCODE();
7701 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7702}
7703
7704
7705/** Opcode 0x0f 0xb3. */
7706FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7707{
7708 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7709 IEMOP_HLP_MIN_386();
7710 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7711}
7712
7713
7714/** Opcode 0x0f 0xb4. */
7715FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7716{
7717 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7718 IEMOP_HLP_MIN_386();
7719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7721 return IEMOP_RAISE_INVALID_OPCODE();
7722 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7723}
7724
7725
7726/** Opcode 0x0f 0xb5. */
7727FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7728{
7729 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7730 IEMOP_HLP_MIN_386();
7731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7733 return IEMOP_RAISE_INVALID_OPCODE();
7734 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7735}
7736
7737
7738/** Opcode 0x0f 0xb6. */
7739FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7740{
7741 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7742 IEMOP_HLP_MIN_386();
7743
7744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7745
7746 /*
7747 * If rm is denoting a register, no more instruction bytes.
7748 */
7749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7750 {
7751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7752 switch (pVCpu->iem.s.enmEffOpSize)
7753 {
7754 case IEMMODE_16BIT:
7755 IEM_MC_BEGIN(0, 1);
7756 IEM_MC_LOCAL(uint16_t, u16Value);
7757 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7758 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7759 IEM_MC_ADVANCE_RIP();
7760 IEM_MC_END();
7761 return VINF_SUCCESS;
7762
7763 case IEMMODE_32BIT:
7764 IEM_MC_BEGIN(0, 1);
7765 IEM_MC_LOCAL(uint32_t, u32Value);
7766 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7767 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7768 IEM_MC_ADVANCE_RIP();
7769 IEM_MC_END();
7770 return VINF_SUCCESS;
7771
7772 case IEMMODE_64BIT:
7773 IEM_MC_BEGIN(0, 1);
7774 IEM_MC_LOCAL(uint64_t, u64Value);
7775 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7776 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7777 IEM_MC_ADVANCE_RIP();
7778 IEM_MC_END();
7779 return VINF_SUCCESS;
7780
7781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7782 }
7783 }
7784 else
7785 {
7786 /*
7787 * We're loading a register from memory.
7788 */
7789 switch (pVCpu->iem.s.enmEffOpSize)
7790 {
7791 case IEMMODE_16BIT:
7792 IEM_MC_BEGIN(0, 2);
7793 IEM_MC_LOCAL(uint16_t, u16Value);
7794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7797 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7798 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7799 IEM_MC_ADVANCE_RIP();
7800 IEM_MC_END();
7801 return VINF_SUCCESS;
7802
7803 case IEMMODE_32BIT:
7804 IEM_MC_BEGIN(0, 2);
7805 IEM_MC_LOCAL(uint32_t, u32Value);
7806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7809 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7810 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7811 IEM_MC_ADVANCE_RIP();
7812 IEM_MC_END();
7813 return VINF_SUCCESS;
7814
7815 case IEMMODE_64BIT:
7816 IEM_MC_BEGIN(0, 2);
7817 IEM_MC_LOCAL(uint64_t, u64Value);
7818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7822 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7823 IEM_MC_ADVANCE_RIP();
7824 IEM_MC_END();
7825 return VINF_SUCCESS;
7826
7827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7828 }
7829 }
7830}
7831
7832
7833/** Opcode 0x0f 0xb7. */
7834FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7835{
7836 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7837 IEMOP_HLP_MIN_386();
7838
7839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7840
7841 /** @todo Not entirely sure how the operand size prefix is handled here,
7842 * assuming that it will be ignored. Would be nice to have a few
7843 * tests for this. */
7844 /*
7845 * If rm is denoting a register, no more instruction bytes.
7846 */
7847 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7848 {
7849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7850 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7851 {
7852 IEM_MC_BEGIN(0, 1);
7853 IEM_MC_LOCAL(uint32_t, u32Value);
7854 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7855 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7856 IEM_MC_ADVANCE_RIP();
7857 IEM_MC_END();
7858 }
7859 else
7860 {
7861 IEM_MC_BEGIN(0, 1);
7862 IEM_MC_LOCAL(uint64_t, u64Value);
7863 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7864 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7865 IEM_MC_ADVANCE_RIP();
7866 IEM_MC_END();
7867 }
7868 }
7869 else
7870 {
7871 /*
7872 * We're loading a register from memory.
7873 */
7874 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7875 {
7876 IEM_MC_BEGIN(0, 2);
7877 IEM_MC_LOCAL(uint32_t, u32Value);
7878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7881 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7882 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7883 IEM_MC_ADVANCE_RIP();
7884 IEM_MC_END();
7885 }
7886 else
7887 {
7888 IEM_MC_BEGIN(0, 2);
7889 IEM_MC_LOCAL(uint64_t, u64Value);
7890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7893 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7894 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7895 IEM_MC_ADVANCE_RIP();
7896 IEM_MC_END();
7897 }
7898 }
7899 return VINF_SUCCESS;
7900}
7901
7902
7903/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7904FNIEMOP_UD_STUB(iemOp_jmpe);
7905
7906
7907/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7908FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
7909{
7910 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
7911 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
7912 return iemOp_InvalidNeedRM(pVCpu);
7913#ifndef TST_IEM_CHECK_MC
7914# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
7915 static const IEMOPBINSIZES s_Native =
7916 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
7917# endif
7918 static const IEMOPBINSIZES s_Fallback =
7919 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
7920#endif
7921 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
7922}
7923
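/*
 * The *_fallback helpers cover hosts without native POPCNT support. A
 * portable sketch of what such a fallback boils down to (illustrative only,
 * not the actual iemAImpl implementation), using the classic SWAR bit count:
 *
 *     static unsigned PopCntU32(uint32_t u)
 *     {
 *         u = u - ((u >> 1) & UINT32_C(0x55555555));
 *         u = (u & UINT32_C(0x33333333)) + ((u >> 2) & UINT32_C(0x33333333));
 *         return (((u + (u >> 4)) & UINT32_C(0x0f0f0f0f)) * UINT32_C(0x01010101)) >> 24;
 *     }
 */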
7924
7925/**
7926 * @opcode 0xb9
7927 * @opinvalid intel-modrm
7928 * @optest ->
7929 */
7930FNIEMOP_DEF(iemOp_Grp10)
7931{
7932 /*
7933 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7934 * the ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7935 */
7936 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7937 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7938 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7939}
7940
7941
7942/** Opcode 0x0f 0xba. */
7943FNIEMOP_DEF(iemOp_Grp8)
7944{
7945 IEMOP_HLP_MIN_386();
7946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7947 PCIEMOPBINSIZES pImpl;
7948 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7949 {
7950 case 0: case 1: case 2: case 3:
7951 /* Both AMD and Intel want full modr/m decoding and imm8. */
7952 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7953 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7954 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7955 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7956 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7957 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7958 }
7959 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7960
7961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7962 {
7963 /* register destination. */
7964 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7966
7967 switch (pVCpu->iem.s.enmEffOpSize)
7968 {
7969 case IEMMODE_16BIT:
7970 IEM_MC_BEGIN(3, 0);
7971 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7972 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7973 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7974
7975 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7976 IEM_MC_REF_EFLAGS(pEFlags);
7977 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7978
7979 IEM_MC_ADVANCE_RIP();
7980 IEM_MC_END();
7981 return VINF_SUCCESS;
7982
7983 case IEMMODE_32BIT:
7984 IEM_MC_BEGIN(3, 0);
7985 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7986 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7987 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7988
7989 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7990 IEM_MC_REF_EFLAGS(pEFlags);
7991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7992
7993 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7994 IEM_MC_ADVANCE_RIP();
7995 IEM_MC_END();
7996 return VINF_SUCCESS;
7997
7998 case IEMMODE_64BIT:
7999 IEM_MC_BEGIN(3, 0);
8000 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8001 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8003
8004 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8005 IEM_MC_REF_EFLAGS(pEFlags);
8006 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8007
8008 IEM_MC_ADVANCE_RIP();
8009 IEM_MC_END();
8010 return VINF_SUCCESS;
8011
8012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8013 }
8014 }
8015 else
8016 {
8017 /* memory destination. */
8018
8019 uint32_t fAccess;
8020 if (pImpl->pfnLockedU16)
8021 fAccess = IEM_ACCESS_DATA_RW;
8022 else /* BT */
8023 fAccess = IEM_ACCESS_DATA_R;
8024
8025 /** @todo test negative bit offsets! */
8026 switch (pVCpu->iem.s.enmEffOpSize)
8027 {
8028 case IEMMODE_16BIT:
8029 IEM_MC_BEGIN(3, 1);
8030 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8031 IEM_MC_ARG(uint16_t, u16Src, 1);
8032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8034
8035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8036 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8037 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
8038 if (pImpl->pfnLockedU16)
8039 IEMOP_HLP_DONE_DECODING();
8040 else
8041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8042 IEM_MC_FETCH_EFLAGS(EFlags);
8043 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8044 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8045 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8046 else
8047 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8049
8050 IEM_MC_COMMIT_EFLAGS(EFlags);
8051 IEM_MC_ADVANCE_RIP();
8052 IEM_MC_END();
8053 return VINF_SUCCESS;
8054
8055 case IEMMODE_32BIT:
8056 IEM_MC_BEGIN(3, 1);
8057 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8058 IEM_MC_ARG(uint32_t, u32Src, 1);
8059 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8061
8062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8063 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8064 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8065 if (pImpl->pfnLockedU16)
8066 IEMOP_HLP_DONE_DECODING();
8067 else
8068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8069 IEM_MC_FETCH_EFLAGS(EFlags);
8070 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8071 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8073 else
8074 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8075 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8076
8077 IEM_MC_COMMIT_EFLAGS(EFlags);
8078 IEM_MC_ADVANCE_RIP();
8079 IEM_MC_END();
8080 return VINF_SUCCESS;
8081
8082 case IEMMODE_64BIT:
8083 IEM_MC_BEGIN(3, 1);
8084 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8085 IEM_MC_ARG(uint64_t, u64Src, 1);
8086 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8088
8089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8090 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8091 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8092 if (pImpl->pfnLockedU16)
8093 IEMOP_HLP_DONE_DECODING();
8094 else
8095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096 IEM_MC_FETCH_EFLAGS(EFlags);
8097 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8098 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8099 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8100 else
8101 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8102 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8103
8104 IEM_MC_COMMIT_EFLAGS(EFlags);
8105 IEM_MC_ADVANCE_RIP();
8106 IEM_MC_END();
8107 return VINF_SUCCESS;
8108
8109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8110 }
8111 }
8112}
8113
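/*
 * Note on the immediate masking above: with an imm8 bit offset, BT/BTS/BTR/
 * BTC wrap the offset modulo the operand size (hence u8Bit & 0x0f/0x1f/0x3f),
 * unlike the register-offset forms where a memory operand can be addressed
 * beyond the operand size. E.g. 'bt word [mem], 17' tests bit 1 of the word
 * at [mem]. The effective address is calculated with cbImm=1 because the
 * imm8 has not been consumed yet at that point.
 */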
8114
8115/** Opcode 0x0f 0xbb. */
8116FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8117{
8118 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8119 IEMOP_HLP_MIN_386();
8120 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8121}
8122
8123
8124/**
8125 * Common worker for BSF and BSR instructions.
8126 *
8127 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8128 * the destination register, which means that for 32-bit operations the high
8129 * bits must be left alone.
8130 *
8131 * @param pImpl Pointer to the instruction implementation (assembly).
8132 */
8133FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8134{
8135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8136
8137 /*
8138 * If rm is denoting a register, no more instruction bytes.
8139 */
8140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8141 {
8142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8143 switch (pVCpu->iem.s.enmEffOpSize)
8144 {
8145 case IEMMODE_16BIT:
8146 IEM_MC_BEGIN(3, 0);
8147 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8148 IEM_MC_ARG(uint16_t, u16Src, 1);
8149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8150
8151 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8152 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8153 IEM_MC_REF_EFLAGS(pEFlags);
8154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8155
8156 IEM_MC_ADVANCE_RIP();
8157 IEM_MC_END();
8158 break;
8159
8160 case IEMMODE_32BIT:
8161 IEM_MC_BEGIN(3, 0);
8162 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8163 IEM_MC_ARG(uint32_t, u32Src, 1);
8164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8165
8166 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8167 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8168 IEM_MC_REF_EFLAGS(pEFlags);
8169 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8170 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8171 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8172 IEM_MC_ENDIF();
8173 IEM_MC_ADVANCE_RIP();
8174 IEM_MC_END();
8175 break;
8176
8177 case IEMMODE_64BIT:
8178 IEM_MC_BEGIN(3, 0);
8179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8180 IEM_MC_ARG(uint64_t, u64Src, 1);
8181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8182
8183 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8184 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8185 IEM_MC_REF_EFLAGS(pEFlags);
8186 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8187
8188 IEM_MC_ADVANCE_RIP();
8189 IEM_MC_END();
8190 break;
8191 }
8192 }
8193 else
8194 {
8195 /*
8196 * We're accessing memory.
8197 */
8198 switch (pVCpu->iem.s.enmEffOpSize)
8199 {
8200 case IEMMODE_16BIT:
8201 IEM_MC_BEGIN(3, 1);
8202 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8203 IEM_MC_ARG(uint16_t, u16Src, 1);
8204 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8206
8207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8209 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8210 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8211 IEM_MC_REF_EFLAGS(pEFlags);
8212 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8213
8214 IEM_MC_ADVANCE_RIP();
8215 IEM_MC_END();
8216 break;
8217
8218 case IEMMODE_32BIT:
8219 IEM_MC_BEGIN(3, 1);
8220 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8221 IEM_MC_ARG(uint32_t, u32Src, 1);
8222 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8224
8225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8227 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8228 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8229 IEM_MC_REF_EFLAGS(pEFlags);
8230 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8231
8232 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8233 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8234 IEM_MC_ENDIF();
8235 IEM_MC_ADVANCE_RIP();
8236 IEM_MC_END();
8237 break;
8238
8239 case IEMMODE_64BIT:
8240 IEM_MC_BEGIN(3, 1);
8241 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8242 IEM_MC_ARG(uint64_t, u64Src, 1);
8243 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8245
8246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8249 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8250 IEM_MC_REF_EFLAGS(pEFlags);
8251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8252
8253 IEM_MC_ADVANCE_RIP();
8254 IEM_MC_END();
8255 break;
8256 }
8257 }
8258 return VINF_SUCCESS;
8259}
8260
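/*
 * What the 32-bit ZF guard above implements, as a sketch:
 *
 *     if (u32Src != 0)
 *     {
 *         u64GregDst = ScanForBit(u32Src); // hypothetical helper; a hit writes the index and zeros bits 63:32
 *         fEFlags   &= ~X86_EFL_ZF;
 *     }
 *     else
 *         fEFlags   |= X86_EFL_ZF;         // miss: the destination register is not written at all
 */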
8261
8262/** Opcode 0x0f 0xbc. */
8263FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
8264{
8265 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
8266 IEMOP_HLP_MIN_386();
8267 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8268 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
8269}
8270
8271
8272/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
8273FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
8274{
8275 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8276 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
8277 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8278
8279#ifndef TST_IEM_CHECK_MC
8280 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
8281 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
8282 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
8283 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
8284 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
8285 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
8286 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
8287 {
8288 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
8289 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
8290 };
8291#endif
8292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8293 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8294 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8295}
8296
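/*
 * TZCNT vs BSF (why this cannot simply alias iemOp_bsf_Gv_Ev once BMI1 is
 * present): tzcnt of zero yields the operand width (e.g. 32) and sets CF,
 * whereas bsf of zero leaves the destination unchanged and sets ZF; tzcnt
 * instead sets ZF when the result is zero. The same distinction applies to
 * LZCNT/BSR below.
 */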
8297
8298/** Opcode 0x0f 0xbd. */
8299FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
8300{
8301 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
8302 IEMOP_HLP_MIN_386();
8303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
8304 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
8305}
8306
8307
8308/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
8309FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
8310{
8311 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
8312 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
8313 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8314
8315#ifndef TST_IEM_CHECK_MC
8316 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
8317 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
8318 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
8319 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
8320 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
8321 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
8322 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
8323 {
8324 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
8325 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
8326 };
8327#endif
8328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
8329 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
8330 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
8331}
8332
8333
8335/** Opcode 0x0f 0xbe. */
8336FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
8337{
8338 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
8339 IEMOP_HLP_MIN_386();
8340
8341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8342
8343 /*
8344 * If rm is denoting a register, no more instruction bytes.
8345 */
8346 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8347 {
8348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8349 switch (pVCpu->iem.s.enmEffOpSize)
8350 {
8351 case IEMMODE_16BIT:
8352 IEM_MC_BEGIN(0, 1);
8353 IEM_MC_LOCAL(uint16_t, u16Value);
8354 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8355 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8356 IEM_MC_ADVANCE_RIP();
8357 IEM_MC_END();
8358 return VINF_SUCCESS;
8359
8360 case IEMMODE_32BIT:
8361 IEM_MC_BEGIN(0, 1);
8362 IEM_MC_LOCAL(uint32_t, u32Value);
8363 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8364 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8365 IEM_MC_ADVANCE_RIP();
8366 IEM_MC_END();
8367 return VINF_SUCCESS;
8368
8369 case IEMMODE_64BIT:
8370 IEM_MC_BEGIN(0, 1);
8371 IEM_MC_LOCAL(uint64_t, u64Value);
8372 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8373 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8374 IEM_MC_ADVANCE_RIP();
8375 IEM_MC_END();
8376 return VINF_SUCCESS;
8377
8378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8379 }
8380 }
8381 else
8382 {
8383 /*
8384 * We're loading a register from memory.
8385 */
8386 switch (pVCpu->iem.s.enmEffOpSize)
8387 {
8388 case IEMMODE_16BIT:
8389 IEM_MC_BEGIN(0, 2);
8390 IEM_MC_LOCAL(uint16_t, u16Value);
8391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8394 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8395 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8396 IEM_MC_ADVANCE_RIP();
8397 IEM_MC_END();
8398 return VINF_SUCCESS;
8399
8400 case IEMMODE_32BIT:
8401 IEM_MC_BEGIN(0, 2);
8402 IEM_MC_LOCAL(uint32_t, u32Value);
8403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8406 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8407 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8408 IEM_MC_ADVANCE_RIP();
8409 IEM_MC_END();
8410 return VINF_SUCCESS;
8411
8412 case IEMMODE_64BIT:
8413 IEM_MC_BEGIN(0, 2);
8414 IEM_MC_LOCAL(uint64_t, u64Value);
8415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8418 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8419 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8420 IEM_MC_ADVANCE_RIP();
8421 IEM_MC_END();
8422 return VINF_SUCCESS;
8423
8424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8425 }
8426 }
8427}
8428
8429
8430/** Opcode 0x0f 0xbf. */
8431FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8432{
8433 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8434 IEMOP_HLP_MIN_386();
8435
8436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8437
8438 /** @todo Not entirely sure how the operand size prefix is handled here,
8439 * assuming that it will be ignored. Would be nice to have a few
8440 * tests for this. */
8441 /*
8442 * If rm is denoting a register, no more instruction bytes.
8443 */
8444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8445 {
8446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8447 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8448 {
8449 IEM_MC_BEGIN(0, 1);
8450 IEM_MC_LOCAL(uint32_t, u32Value);
8451 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8452 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8453 IEM_MC_ADVANCE_RIP();
8454 IEM_MC_END();
8455 }
8456 else
8457 {
8458 IEM_MC_BEGIN(0, 1);
8459 IEM_MC_LOCAL(uint64_t, u64Value);
8460 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8461 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8462 IEM_MC_ADVANCE_RIP();
8463 IEM_MC_END();
8464 }
8465 }
8466 else
8467 {
8468 /*
8469 * We're loading a register from memory.
8470 */
8471 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8472 {
8473 IEM_MC_BEGIN(0, 2);
8474 IEM_MC_LOCAL(uint32_t, u32Value);
8475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8478 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8479 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8480 IEM_MC_ADVANCE_RIP();
8481 IEM_MC_END();
8482 }
8483 else
8484 {
8485 IEM_MC_BEGIN(0, 2);
8486 IEM_MC_LOCAL(uint64_t, u64Value);
8487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8490 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8492 IEM_MC_ADVANCE_RIP();
8493 IEM_MC_END();
8494 }
8495 }
8496 return VINF_SUCCESS;
8497}
8498
8499
8500/** Opcode 0x0f 0xc0. */
8501FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8502{
8503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8504 IEMOP_HLP_MIN_486();
8505 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8506
8507 /*
8508 * If rm is denoting a register, no more instruction bytes.
8509 */
8510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8511 {
8512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8513
8514 IEM_MC_BEGIN(3, 0);
8515 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8516 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8517 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8518
8519 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8520 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8521 IEM_MC_REF_EFLAGS(pEFlags);
8522 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8523
8524 IEM_MC_ADVANCE_RIP();
8525 IEM_MC_END();
8526 }
8527 else
8528 {
8529 /*
8530 * We're accessing memory.
8531 */
8532 IEM_MC_BEGIN(3, 3);
8533 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8534 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8535 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8536 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8538
8539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8540 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8541 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8542 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8543 IEM_MC_FETCH_EFLAGS(EFlags);
8544 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8545 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8546 else
8547 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8548
8549 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8550 IEM_MC_COMMIT_EFLAGS(EFlags);
8551 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8552 IEM_MC_ADVANCE_RIP();
8553 IEM_MC_END();
8554 return VINF_SUCCESS;
8555 }
8556 return VINF_SUCCESS;
8557}
8558
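/*
 * Reference semantics of XADD (a sketch; the register copy local in the
 * memory path exists because the general register may only be updated after
 * the memory operand has been committed):
 *
 *     uint8_t const u8Tmp = *pu8Dst;
 *     *pu8Dst = u8Tmp + u8Reg;    // EFLAGS are set as for ADD
 *     u8Reg   = u8Tmp;            // the source register receives the old destination
 */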
8559
8560/** Opcode 0x0f 0xc1. */
8561FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8562{
8563 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8564 IEMOP_HLP_MIN_486();
8565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8566
8567 /*
8568 * If rm is denoting a register, no more instruction bytes.
8569 */
8570 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8571 {
8572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8573
8574 switch (pVCpu->iem.s.enmEffOpSize)
8575 {
8576 case IEMMODE_16BIT:
8577 IEM_MC_BEGIN(3, 0);
8578 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8579 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8580 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8581
8582 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8583 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8584 IEM_MC_REF_EFLAGS(pEFlags);
8585 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8586
8587 IEM_MC_ADVANCE_RIP();
8588 IEM_MC_END();
8589 return VINF_SUCCESS;
8590
8591 case IEMMODE_32BIT:
8592 IEM_MC_BEGIN(3, 0);
8593 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8594 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8595 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8596
8597 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8598 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8599 IEM_MC_REF_EFLAGS(pEFlags);
8600 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8601
8602 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8604 IEM_MC_ADVANCE_RIP();
8605 IEM_MC_END();
8606 return VINF_SUCCESS;
8607
8608 case IEMMODE_64BIT:
8609 IEM_MC_BEGIN(3, 0);
8610 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8611 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8613
8614 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8615 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8616 IEM_MC_REF_EFLAGS(pEFlags);
8617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8618
8619 IEM_MC_ADVANCE_RIP();
8620 IEM_MC_END();
8621 return VINF_SUCCESS;
8622
8623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8624 }
8625 }
8626 else
8627 {
8628 /*
8629 * We're accessing memory.
8630 */
8631 switch (pVCpu->iem.s.enmEffOpSize)
8632 {
8633 case IEMMODE_16BIT:
8634 IEM_MC_BEGIN(3, 3);
8635 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8636 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8637 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8638 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8640
8641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8642 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8643 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8644 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8645 IEM_MC_FETCH_EFLAGS(EFlags);
8646 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8647 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8648 else
8649 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8650
8651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8652 IEM_MC_COMMIT_EFLAGS(EFlags);
8653 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8654 IEM_MC_ADVANCE_RIP();
8655 IEM_MC_END();
8656 return VINF_SUCCESS;
8657
8658 case IEMMODE_32BIT:
8659 IEM_MC_BEGIN(3, 3);
8660 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8661 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8662 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8663 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8665
8666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8667 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8668 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8669 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8670 IEM_MC_FETCH_EFLAGS(EFlags);
8671 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8673 else
8674 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8675
8676 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8677 IEM_MC_COMMIT_EFLAGS(EFlags);
8678 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8679 IEM_MC_ADVANCE_RIP();
8680 IEM_MC_END();
8681 return VINF_SUCCESS;
8682
8683 case IEMMODE_64BIT:
8684 IEM_MC_BEGIN(3, 3);
8685 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8686 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8687 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8688 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8690
8691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8692 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8693 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8694 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8695 IEM_MC_FETCH_EFLAGS(EFlags);
8696 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8697 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8698 else
8699 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8700
8701 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8702 IEM_MC_COMMIT_EFLAGS(EFlags);
8703 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8704 IEM_MC_ADVANCE_RIP();
8705 IEM_MC_END();
8706 return VINF_SUCCESS;
8707
8708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8709 }
8710 }
8711}
8712
8713
8714/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8715FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8716/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8717FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8718/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8719FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8720/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8721FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8722
8723
8724/** Opcode 0x0f 0xc3. */
8725FNIEMOP_DEF(iemOp_movnti_My_Gy)
8726{
8727 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8728
8729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8730
8731 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8732 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8733 {
8734 switch (pVCpu->iem.s.enmEffOpSize)
8735 {
8736 case IEMMODE_32BIT:
8737 IEM_MC_BEGIN(0, 2);
8738 IEM_MC_LOCAL(uint32_t, u32Value);
8739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8740
8741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8743 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8744 return IEMOP_RAISE_INVALID_OPCODE();
8745
8746 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8747 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8748 IEM_MC_ADVANCE_RIP();
8749 IEM_MC_END();
8750 break;
8751
8752 case IEMMODE_64BIT:
8753 IEM_MC_BEGIN(0, 2);
8754 IEM_MC_LOCAL(uint64_t, u64Value);
8755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8756
8757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8759 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8760 return IEMOP_RAISE_INVALID_OPCODE();
8761
8762 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8763 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8764 IEM_MC_ADVANCE_RIP();
8765 IEM_MC_END();
8766 break;
8767
8768 case IEMMODE_16BIT:
8769 /** @todo check this form. */
8770 return IEMOP_RAISE_INVALID_OPCODE();
8771 }
8772 }
8773 else
8774 return IEMOP_RAISE_INVALID_OPCODE();
8775 return VINF_SUCCESS;
8776}
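
/*
 * Note that movnti is architecturally an ordinary store with a non-temporal
 * cache hint, which an interpreter can safely ignore. On an x86 host the hint
 * corresponds to something like the SSE2 intrinsic below (illustrative only;
 * pvDst is a hypothetical destination pointer):
 *
 *     #include <emmintrin.h>
 *     _mm_stream_si32((int *)pvDst, (int)u32Value);
 */
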
8777/* Opcode 0x66 0x0f 0xc3 - invalid */
8778/* Opcode 0xf3 0x0f 0xc3 - invalid */
8779/* Opcode 0xf2 0x0f 0xc3 - invalid */
8780
8781/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8782FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8783/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8784FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8785/* Opcode 0xf3 0x0f 0xc4 - invalid */
8786/* Opcode 0xf2 0x0f 0xc4 - invalid */
8787
8788/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8789FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8790/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8791FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8792/* Opcode 0xf3 0x0f 0xc5 - invalid */
8793/* Opcode 0xf2 0x0f 0xc5 - invalid */
8794
8795/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8796FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8797/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8798FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8799/* Opcode 0xf3 0x0f 0xc6 - invalid */
8800/* Opcode 0xf2 0x0f 0xc6 - invalid */
8801
8802
8803/** Opcode 0x0f 0xc7 !11/1. */
8804FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8805{
8806 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8807
8808 IEM_MC_BEGIN(4, 3);
8809 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8810 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8811 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8812 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8813 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8814 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8816
8817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8818 IEMOP_HLP_DONE_DECODING();
8819 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8820
8821 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8822 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8823 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8824
8825 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8826 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8827 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8828
8829 IEM_MC_FETCH_EFLAGS(EFlags);
8830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8832 else
8833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8834
8835 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8836 IEM_MC_COMMIT_EFLAGS(EFlags);
8837 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8838 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8839 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8840 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8841 IEM_MC_ENDIF();
8842 IEM_MC_ADVANCE_RIP();
8843
8844 IEM_MC_END();
8845 return VINF_SUCCESS;
8846}
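
/*
 * For reference, a plain C model of the compare/exchange performed above (an
 * illustrative sketch only; the helper name is made up, and atomicity and
 * EFLAGS updating are handled by the real iemAImpl_cmpxchg8b workers):
 */
#if 0
static bool ExampleCmpXchg8b(uint64_t *pu64Mem, uint64_t *puEdxEax, uint64_t uEcxEbx)
{
    if (*pu64Mem == *puEdxEax)
    {
        *pu64Mem = uEcxEbx;             /* equal: store ECX:EBX, ZF=1 */
        return true;
    }
    *puEdxEax = *pu64Mem;               /* not equal: load memory into EDX:EAX, ZF=0 */
    return false;
}
#endif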
8847
8848
8849/** Opcode REX.W 0x0f 0xc7 !11/1. */
8850FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8851{
8852 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8853 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8854 {
8855#if 0
8856 RT_NOREF(bRm);
8857 IEMOP_BITCH_ABOUT_STUB();
8858 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8859#else
8860 IEM_MC_BEGIN(4, 3);
8861 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8862 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8863 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8864 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8865 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8866 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8868
8869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8870 IEMOP_HLP_DONE_DECODING();
8871 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8872 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8873
8874 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8875 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8876 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8877
8878 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8879 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8880 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8881
8882 IEM_MC_FETCH_EFLAGS(EFlags);
8883# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
8884# if defined(RT_ARCH_AMD64)
8885 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8886# endif
8887 {
8888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8889 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8890 else
8891 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8892 }
8893# if defined(RT_ARCH_AMD64)
8894 else
8895# endif
8896# endif
8897# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
8898 {
8899 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8900 accesses and not at all atomic, which works fine in a UNI CPU guest
8901 configuration (ignoring DMA). If guest SMP is active we have no choice
8902 but to use a rendezvous callback here. Sigh. */
8903 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8904 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8905 else
8906 {
8907 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8908 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8909 }
8910 }
8911# endif
8912
8913 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8914 IEM_MC_COMMIT_EFLAGS(EFlags);
8915 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8916 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8917 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8918 IEM_MC_ENDIF();
8919 IEM_MC_ADVANCE_RIP();
8920
8921 IEM_MC_END();
8922 return VINF_SUCCESS;
8923#endif
8924 }
8925 Log(("cmpxchg16b -> #UD\n"));
8926 return IEMOP_RAISE_INVALID_OPCODE();
8927}
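
/*
 * On hosts with CX16 the 16-byte compare/exchange can be done lock-free. A
 * GCC/Clang sketch of the idea (illustrative only; assumes an x86-64 host
 * built with -mcx16 and a 16-byte aligned operand so the built-in is
 * lock-free):
 *
 *     __uint128_t uExpected = (__uint128_t)uRdx << 64 | uRax;
 *     __uint128_t uNew      = (__uint128_t)uRcx << 64 | uRbx;
 *     bool fZF = __atomic_compare_exchange_n((__uint128_t *)pvMem, &uExpected,
 *                                            uNew, false, __ATOMIC_SEQ_CST,
 *                                            __ATOMIC_SEQ_CST);
 *
 * The 'false' selects the strong variant; on failure uExpected receives the
 * current memory value, matching the RDX:RAX update done above.
 */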
8928
8929FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8930{
8931 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8932 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8933 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8934}
8935
8936/** Opcode 0x0f 0xc7 11/6. */
8937FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8938
8939/** Opcode 0x0f 0xc7 !11/6. */
8940#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8941FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8942{
8943 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8944 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
8945 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
8946 IEM_MC_BEGIN(2, 0);
8947 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8948 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8950 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8951 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8952 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8953 IEM_MC_END();
8954 return VINF_SUCCESS;
8955}
8956#else
8957FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8958#endif
8959
8960/** Opcode 0x66 0x0f 0xc7 !11/6. */
8961#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8962FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8963{
8964 IEMOP_MNEMONIC(vmclear, "vmclear");
8965 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
8966 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
8967 IEM_MC_BEGIN(2, 0);
8968 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8969 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8971 IEMOP_HLP_DONE_DECODING();
8972 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8973 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8974 IEM_MC_END();
8975 return VINF_SUCCESS;
8976}
8977#else
8978FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8979#endif
8980
8981/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8982#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8983FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8984{
8985 IEMOP_MNEMONIC(vmxon, "vmxon");
8986 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
8987 IEM_MC_BEGIN(2, 0);
8988 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8989 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8991 IEMOP_HLP_DONE_DECODING();
8992 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8993 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8994 IEM_MC_END();
8995 return VINF_SUCCESS;
8996}
8997#else
8998FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8999#endif
9000
9001/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9002#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9003FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9004{
9005 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9006 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9007 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9008 IEM_MC_BEGIN(2, 0);
9009 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9010 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9012 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9013 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9014 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9015 IEM_MC_END();
9016 return VINF_SUCCESS;
9017}
9018#else
9019FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9020#endif
9021
9022/** Opcode 0x0f 0xc7 11/7. */
9023FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9024
9025
9026/**
9027 * Group 9 jump table for register variant.
9028 */
9029IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9030{ /* pfx: none, 066h, 0f3h, 0f2h */
9031 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9032 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9033 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9034 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9035 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9036 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9037 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9038 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9039};
9040AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9041
9042
9043/**
9044 * Group 9 jump table for memory variant.
9045 */
9046IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9047{ /* pfx: none, 066h, 0f3h, 0f2h */
9048 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9049 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9050 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9051 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9052 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9053 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9054 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9055 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9056};
9057AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9058
9059
9060/** Opcode 0x0f 0xc7. */
9061FNIEMOP_DEF(iemOp_Grp9)
9062{
9063 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9064 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9065 /* register, register */
9066 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
9067 + pVCpu->iem.s.idxPrefix], bRm);
9068 /* memory, register */
9069 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
9070 + pVCpu->iem.s.idxPrefix], bRm);
9071}
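
/*
 * Worked example of the indexing above: an 0f3h prefix gives idxPrefix = 2,
 * so a ModR/M byte with reg = 6 in memory form yields index 6 * 4 + 2 = 26,
 * which selects iemOp_Grp9_vmxon_Mq from g_apfnGroup9MemReg.
 */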
9072
9073
9074/**
9075 * Common 'bswap register' helper.
9076 */
9077FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9078{
9079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9080 switch (pVCpu->iem.s.enmEffOpSize)
9081 {
9082 case IEMMODE_16BIT:
9083 IEM_MC_BEGIN(1, 0);
9084 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9085 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9086 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9087 IEM_MC_ADVANCE_RIP();
9088 IEM_MC_END();
9089 return VINF_SUCCESS;
9090
9091 case IEMMODE_32BIT:
9092 IEM_MC_BEGIN(1, 0);
9093 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9094 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9095 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9096 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9097 IEM_MC_ADVANCE_RIP();
9098 IEM_MC_END();
9099 return VINF_SUCCESS;
9100
9101 case IEMMODE_64BIT:
9102 IEM_MC_BEGIN(1, 0);
9103 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9104 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9105 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9106 IEM_MC_ADVANCE_RIP();
9107 IEM_MC_END();
9108 return VINF_SUCCESS;
9109
9110 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9111 }
9112}
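
/*
 * For reference, the 32-bit byte swap done by iemAImpl_bswap_u32, expressed
 * in plain C (an illustrative sketch only; the example name is made up):
 */
#if 0
static uint32_t ExampleBswapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif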
9113
9114
9115/** Opcode 0x0f 0xc8. */
9116FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9117{
9118 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9119 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9120 prefix. It appears REX.B is the correct prefix. For a parallel
9121 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9122 IEMOP_HLP_MIN_486();
9123 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9124}
9125
9126
9127/** Opcode 0x0f 0xc9. */
9128FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9129{
9130 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9131 IEMOP_HLP_MIN_486();
9132 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9133}
9134
9135
9136/** Opcode 0x0f 0xca. */
9137FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9138{
9139 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9140 IEMOP_HLP_MIN_486();
9141 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9142}
9143
9144
9145/** Opcode 0x0f 0xcb. */
9146FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9147{
9148 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9149 IEMOP_HLP_MIN_486();
9150 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9151}
9152
9153
9154/** Opcode 0x0f 0xcc. */
9155FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9156{
9157 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9158 IEMOP_HLP_MIN_486();
9159 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9160}
9161
9162
9163/** Opcode 0x0f 0xcd. */
9164FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9165{
9166 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9167 IEMOP_HLP_MIN_486();
9168 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9169}
9170
9171
9172/** Opcode 0x0f 0xce. */
9173FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9174{
9175 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9176 IEMOP_HLP_MIN_486();
9177 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9178}
9179
9180
9181/** Opcode 0x0f 0xcf. */
9182FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9183{
9184 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
9185 IEMOP_HLP_MIN_486();
9186 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
9187}
9188
9189
9190/* Opcode 0x0f 0xd0 - invalid */
9191/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
9192FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
9193/* Opcode 0xf3 0x0f 0xd0 - invalid */
9194/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
9195FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9196
9197/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9198FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
9199/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
9200FNIEMOP_STUB(iemOp_psrlw_Vx_W);
9201/* Opcode 0xf3 0x0f 0xd1 - invalid */
9202/* Opcode 0xf2 0x0f 0xd1 - invalid */
9203
9204/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
9205FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
9206/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
9207FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
9208/* Opcode 0xf3 0x0f 0xd2 - invalid */
9209/* Opcode 0xf2 0x0f 0xd2 - invalid */
9210
9211/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
9212FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
9213/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
9214FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
9215/* Opcode 0xf3 0x0f 0xd3 - invalid */
9216/* Opcode 0xf2 0x0f 0xd3 - invalid */
9217
9218/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
9219FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
9220/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
9221FNIEMOP_STUB(iemOp_paddq_Vx_W);
9222/* Opcode 0xf3 0x0f 0xd4 - invalid */
9223/* Opcode 0xf2 0x0f 0xd4 - invalid */
9224
9225/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
9226FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
9227/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
9228FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
9229/* Opcode 0xf3 0x0f 0xd5 - invalid */
9230/* Opcode 0xf2 0x0f 0xd5 - invalid */
9231
9232/* Opcode 0x0f 0xd6 - invalid */
9233
9234/**
9235 * @opcode 0xd6
9236 * @oppfx 0x66
9237 * @opcpuid sse2
9238 * @opgroup og_sse2_pcksclr_datamove
9239 * @opxcpttype none
9240 * @optest op1=-1 op2=2 -> op1=2
9241 * @optest op1=0 op2=-42 -> op1=-42
9242 */
9243FNIEMOP_DEF(iemOp_movq_Wq_Vq)
9244{
9245 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9248 {
9249 /*
9250 * Register, register.
9251 */
9252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9253 IEM_MC_BEGIN(0, 2);
9254 IEM_MC_LOCAL(uint64_t, uSrc);
9255
9256 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9258
9259 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9260 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
9261
9262 IEM_MC_ADVANCE_RIP();
9263 IEM_MC_END();
9264 }
9265 else
9266 {
9267 /*
9268 * Memory, register.
9269 */
9270 IEM_MC_BEGIN(0, 2);
9271 IEM_MC_LOCAL(uint64_t, uSrc);
9272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9273
9274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9276 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9277 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9278
9279 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9280 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9281
9282 IEM_MC_ADVANCE_RIP();
9283 IEM_MC_END();
9284 }
9285 return VINF_SUCCESS;
9286}
9287
9288
9289/**
9290 * @opcode 0xd6
9291 * @opcodesub 11 mr/reg
9292 * @oppfx f3
9293 * @opcpuid sse2
9294 * @opgroup og_sse2_simdint_datamove
9295 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9296 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9297 */
9298FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
9299{
9300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9302 {
9303 /*
9304 * Register, register.
9305 */
9306 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9308 IEM_MC_BEGIN(0, 1);
9309 IEM_MC_LOCAL(uint64_t, uSrc);
9310
9311 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9312 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9313
9314 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
9315 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
9316 IEM_MC_FPU_TO_MMX_MODE();
9317
9318 IEM_MC_ADVANCE_RIP();
9319 IEM_MC_END();
9320 return VINF_SUCCESS;
9321 }
9322
9323 /**
9324 * @opdone
9325 * @opmnemonic udf30fd6mem
9326 * @opcode 0xd6
9327 * @opcodesub !11 mr/reg
9328 * @oppfx f3
9329 * @opunused intel-modrm
9330 * @opcpuid sse
9331 * @optest ->
9332 */
9333 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9334}
9335
9336
9337/**
9338 * @opcode 0xd6
9339 * @opcodesub 11 mr/reg
9340 * @oppfx f2
9341 * @opcpuid sse2
9342 * @opgroup og_sse2_simdint_datamove
9343 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
9344 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9345 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
9346 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
9347 * @optest op1=-42 op2=0xfedcba9876543210
9348 * -> op1=0xfedcba9876543210 ftw=0xff
9349 */
9350FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
9351{
9352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9354 {
9355 /*
9356 * Register, register.
9357 */
9358 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9360 IEM_MC_BEGIN(0, 1);
9361 IEM_MC_LOCAL(uint64_t, uSrc);
9362
9363 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9365
9366 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9367 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9368 IEM_MC_FPU_TO_MMX_MODE();
9369
9370 IEM_MC_ADVANCE_RIP();
9371 IEM_MC_END();
9372 return VINF_SUCCESS;
9373 }
9374
9375 /**
9376 * @opdone
9377 * @opmnemonic udf20fd6mem
9378 * @opcode 0xd6
9379 * @opcodesub !11 mr/reg
9380 * @oppfx f2
9381 * @opunused intel-modrm
9382 * @opcpuid sse
9383 * @optest ->
9384 */
9385 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9386}
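
/*
 * Like other MMX instructions, the register forms of movq2dq and movdq2q
 * above switch the x87 unit into MMX mode (IEM_MC_FPU_TO_MMX_MODE), leaving
 * the abridged tag word all valid; that is what the ftw=0xff assertions in
 * the @optest lines above check.
 */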
9387
9388/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9389FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9390{
9391 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9392 /** @todo testcase: Check that the instruction implicitly clears the high
9393 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9394 * and opcode modifications are made to work with the whole width (not
9395 * just 128). */
9396 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
9397 /* Docs say register only. */
9398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9400 {
9401 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9402 IEM_MC_BEGIN(2, 0);
9403 IEM_MC_ARG(uint64_t *, pDst, 0);
9404 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9406 IEM_MC_PREPARE_FPU_USAGE();
9407 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9408 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9409 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9410 IEM_MC_ADVANCE_RIP();
9411 IEM_MC_END();
9412 return VINF_SUCCESS;
9413 }
9414 return IEMOP_RAISE_INVALID_OPCODE();
9415}
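
/*
 * For reference, the scalar semantics of pmovmskb on the 64-bit MMX source
 * above: the most significant bit of each packed byte is gathered into the
 * low bits of the destination. An illustrative sketch (example name made up):
 */
#if 0
static uint8_t ExamplePMovMskBU64(uint64_t uSrc)
{
    uint8_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= (uint8_t)((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif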
9416
9417/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9418FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9419{
9420 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
9421 /** @todo testcase: Check that the instruction implicitly clears the high
9422 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
9423 * and opcode modifications are made to work with the whole width (not
9424 * just 128). */
9425 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
9426 /* Docs say register only. */
9427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9429 {
9430 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9431 IEM_MC_BEGIN(2, 0);
9432 IEM_MC_ARG(uint64_t *, pDst, 0);
9433 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9434 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9435 IEM_MC_PREPARE_SSE_USAGE();
9436 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9437 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9438 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9439 IEM_MC_ADVANCE_RIP();
9440 IEM_MC_END();
9441 return VINF_SUCCESS;
9442 }
9443 return IEMOP_RAISE_INVALID_OPCODE();
9444}
9445
9446/* Opcode 0xf3 0x0f 0xd7 - invalid */
9447/* Opcode 0xf2 0x0f 0xd7 - invalid */
9448
9449
9450/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9451FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9452/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
9453FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9454/* Opcode 0xf3 0x0f 0xd8 - invalid */
9455/* Opcode 0xf2 0x0f 0xd8 - invalid */
9456
9457/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9458FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9459/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9460FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9461/* Opcode 0xf3 0x0f 0xd9 - invalid */
9462/* Opcode 0xf2 0x0f 0xd9 - invalid */
9463
9464/** Opcode 0x0f 0xda - pminub Pq, Qq */
9465FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9466/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9467FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9468/* Opcode 0xf3 0x0f 0xda - invalid */
9469/* Opcode 0xf2 0x0f 0xda - invalid */
9470
9471/** Opcode 0x0f 0xdb - pand Pq, Qq */
9472FNIEMOP_DEF(iemOp_pand_Pq_Qq)
9473{
9474 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9475 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pand);
9476}
9477
9478
9479/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
9480FNIEMOP_DEF(iemOp_pand_Vx_Wx)
9481{
9482 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9483 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pand);
9484}
9485
9486
9487/* Opcode 0xf3 0x0f 0xdb - invalid */
9488/* Opcode 0xf2 0x0f 0xdb - invalid */
9489
9490/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9491FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9492/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9493FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9494/* Opcode 0xf3 0x0f 0xdc - invalid */
9495/* Opcode 0xf2 0x0f 0xdc - invalid */
9496
9497/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9498FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9499/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9500FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9501/* Opcode 0xf3 0x0f 0xdd - invalid */
9502/* Opcode 0xf2 0x0f 0xdd - invalid */
9503
9504/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9505FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9506/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
9507FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9508/* Opcode 0xf3 0x0f 0xde - invalid */
9509/* Opcode 0xf2 0x0f 0xde - invalid */
9510
9511
9512/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9513FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
9514{
9515 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9516 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pandn);
9517}
9518
9519
9520/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9521FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
9522{
9523 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9524 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pandn);
9525}
9526
9527
9528/* Opcode 0xf3 0x0f 0xdf - invalid */
9529/* Opcode 0xf2 0x0f 0xdf - invalid */
9530
9531/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9532FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9533/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9534FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9535/* Opcode 0xf3 0x0f 0xe0 - invalid */
9536/* Opcode 0xf2 0x0f 0xe0 - invalid */
9537
9538/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9539FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9540/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
9541FNIEMOP_STUB(iemOp_psraw_Vx_W);
9542/* Opcode 0xf3 0x0f 0xe1 - invalid */
9543/* Opcode 0xf2 0x0f 0xe1 - invalid */
9544
9545/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9546FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9547/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9548FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9549/* Opcode 0xf3 0x0f 0xe2 - invalid */
9550/* Opcode 0xf2 0x0f 0xe2 - invalid */
9551
9552/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9553FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9554/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9555FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9556/* Opcode 0xf3 0x0f 0xe3 - invalid */
9557/* Opcode 0xf2 0x0f 0xe3 - invalid */
9558
9559/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9560FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9561/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
9562FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9563/* Opcode 0xf3 0x0f 0xe4 - invalid */
9564/* Opcode 0xf2 0x0f 0xe4 - invalid */
9565
9566/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9567FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9568/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9569FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9570/* Opcode 0xf3 0x0f 0xe5 - invalid */
9571/* Opcode 0xf2 0x0f 0xe5 - invalid */
9572
9573/* Opcode 0x0f 0xe6 - invalid */
9574/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9575FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9576/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9577FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9578/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9579FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9580
9581
9582/**
9583 * @opcode 0xe7
9584 * @opcodesub !11 mr/reg
9585 * @oppfx none
9586 * @opcpuid sse
9587 * @opgroup og_sse1_cachect
9588 * @opxcpttype none
9589 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9590 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9591 */
9592FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9593{
9594 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9596 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9597 {
9598 /* Register, memory. */
9599 IEM_MC_BEGIN(0, 2);
9600 IEM_MC_LOCAL(uint64_t, uSrc);
9601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9602
9603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9605 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9606 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9607
9608 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9609 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9610 IEM_MC_FPU_TO_MMX_MODE();
9611
9612 IEM_MC_ADVANCE_RIP();
9613 IEM_MC_END();
9614 return VINF_SUCCESS;
9615 }
9616 /**
9617 * @opdone
9618 * @opmnemonic ud0fe7reg
9619 * @opcode 0xe7
9620 * @opcodesub 11 mr/reg
9621 * @oppfx none
9622 * @opunused immediate
9623 * @opcpuid sse
9624 * @optest ->
9625 */
9626 return IEMOP_RAISE_INVALID_OPCODE();
9627}
9628
9629/**
9630 * @opcode 0xe7
9631 * @opcodesub !11 mr/reg
9632 * @oppfx 0x66
9633 * @opcpuid sse2
9634 * @opgroup og_sse2_cachect
9635 * @opxcpttype 1
9636 * @optest op1=-1 op2=2 -> op1=2
9637 * @optest op1=0 op2=-42 -> op1=-42
9638 */
9639FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9640{
9641 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9643 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9644 {
9645 /* Register, memory. */
9646 IEM_MC_BEGIN(0, 2);
9647 IEM_MC_LOCAL(RTUINT128U, uSrc);
9648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9649
9650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9653 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9654
9655 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9656 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9657
9658 IEM_MC_ADVANCE_RIP();
9659 IEM_MC_END();
9660 return VINF_SUCCESS;
9661 }
9662
9663 /**
9664 * @opdone
9665 * @opmnemonic ud660fe7reg
9666 * @opcode 0xe7
9667 * @opcodesub 11 mr/reg
9668 * @oppfx 0x66
9669 * @opunused immediate
9670 * @opcpuid sse
9671 * @optest ->
9672 */
9673 return IEMOP_RAISE_INVALID_OPCODE();
9674}
9675
9676/* Opcode 0xf3 0x0f 0xe7 - invalid */
9677/* Opcode 0xf2 0x0f 0xe7 - invalid */
9678
9679
9680/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9681FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9682/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
9683FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9684/* Opcode 0xf3 0x0f 0xe8 - invalid */
9685/* Opcode 0xf2 0x0f 0xe8 - invalid */
9686
9687/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9688FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9689/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9690FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9691/* Opcode 0xf3 0x0f 0xe9 - invalid */
9692/* Opcode 0xf2 0x0f 0xe9 - invalid */
9693
9694/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9695FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9696/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9697FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9698/* Opcode 0xf3 0x0f 0xea - invalid */
9699/* Opcode 0xf2 0x0f 0xea - invalid */
9700
9701
9702/** Opcode 0x0f 0xeb - por Pq, Qq */
9703FNIEMOP_DEF(iemOp_por_Pq_Qq)
9704{
9705 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9706 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_por);
9707}
9708
9709
9710/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
9711FNIEMOP_DEF(iemOp_por_Vx_W)
9712{
9713 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9714 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_por);
9715}
9716
9717
9718/* Opcode 0xf3 0x0f 0xeb - invalid */
9719/* Opcode 0xf2 0x0f 0xeb - invalid */
9720
9721/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9722FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9723/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9724FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9725/* Opcode 0xf3 0x0f 0xec - invalid */
9726/* Opcode 0xf2 0x0f 0xec - invalid */
9727
9728/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9729FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9730/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9731FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9732/* Opcode 0xf3 0x0f 0xed - invalid */
9733/* Opcode 0xf2 0x0f 0xed - invalid */
9734
9735/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9736FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9737/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
9738FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9739/* Opcode 0xf3 0x0f 0xee - invalid */
9740/* Opcode 0xf2 0x0f 0xee - invalid */
9741
9742
9743/** Opcode 0x0f 0xef - pxor Pq, Qq */
9744FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9745{
9746 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9747 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9748}
9749
9750
9751/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9752FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9753{
9754 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9755 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9756}
9757
9758
9759/* Opcode 0xf3 0x0f 0xef - invalid */
9760/* Opcode 0xf2 0x0f 0xef - invalid */
9761
9762/* Opcode 0x0f 0xf0 - invalid */
9763/* Opcode 0x66 0x0f 0xf0 - invalid */
9764/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9765FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9766
9767/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9768FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9769/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
9770FNIEMOP_STUB(iemOp_psllw_Vx_W);
9771/* Opcode 0xf2 0x0f 0xf1 - invalid */
9772
9773/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9774FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9775/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9776FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9777/* Opcode 0xf2 0x0f 0xf2 - invalid */
9778
9779/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9780FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9781/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9782FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9783/* Opcode 0xf2 0x0f 0xf3 - invalid */
9784
9785/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9786FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9787/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
9788FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9789/* Opcode 0xf2 0x0f 0xf4 - invalid */
9790
9791/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9792FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9793/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9794FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9795/* Opcode 0xf2 0x0f 0xf5 - invalid */
9796
9797/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9798FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9799/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9800FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9801/* Opcode 0xf2 0x0f 0xf6 - invalid */
9802
9803/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9804FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9805/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9806FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9807/* Opcode 0xf2 0x0f 0xf7 - invalid */
9808
9809/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9810FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9811/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
9812FNIEMOP_STUB(iemOp_psubb_Vx_W);
9813/* Opcode 0xf2 0x0f 0xf8 - invalid */
9814
9815/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9816FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9817/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9818FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9819/* Opcode 0xf2 0x0f 0xf9 - invalid */
9820
9821/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9822FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9823/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9824FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9825/* Opcode 0xf2 0x0f 0xfa - invalid */
9826
9827/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9828FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9829/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
9830FNIEMOP_STUB(iemOp_psubq_Vx_W);
9831/* Opcode 0xf2 0x0f 0xfb - invalid */
9832
9833/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9834FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9835/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9836FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9837/* Opcode 0xf2 0x0f 0xfc - invalid */
9838
9839/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9840FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9841/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9842FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9843/* Opcode 0xf2 0x0f 0xfd - invalid */
9844
9845/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9846FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9847/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
9848FNIEMOP_STUB(iemOp_paddd_Vx_W);
9849/* Opcode 0xf2 0x0f 0xfe - invalid */
9850
9851
9852/** Opcode **** 0x0f 0xff - UD0 */
9853FNIEMOP_DEF(iemOp_ud0)
9854{
9855 IEMOP_MNEMONIC(ud0, "ud0");
9856 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9857 {
9858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9859#ifndef TST_IEM_CHECK_MC
9860 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9861 {
9862 RTGCPTR GCPtrEff;
9863 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9864 if (rcStrict != VINF_SUCCESS)
9865 return rcStrict;
9866 }
9867#endif
9868 IEMOP_HLP_DONE_DECODING();
9869 }
9870 return IEMOP_RAISE_INVALID_OPCODE();
9871}
9872
9873
9874
9875/**
9876 * Two byte opcode map, first byte 0x0f.
9877 *
9878 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9879 * check if it needs updating as well when making changes.
9880 */
9881IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9882{
9883 /* no prefix, 066h prefix f3h prefix, f2h prefix */
9884 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9885 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9886 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9887 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9888 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9889 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9890 /* 0x06 */ IEMOP_X4(iemOp_clts),
9891 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9892 /* 0x08 */ IEMOP_X4(iemOp_invd),
9893 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9894 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9895 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9896 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9897 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9898 /* 0x0e */ IEMOP_X4(iemOp_femms),
9899 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9900
9901 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9902 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9903 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9904 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9905 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9906 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9907 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9908 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9909 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9910 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9911 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9912 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9913 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9914 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9915 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9916 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9917
9918 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9919 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9920 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9921 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9922 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9923 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9924 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9925 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9926 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9927 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9928 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9929 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9930 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9931 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9932 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9933 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9934
9935 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9936 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9937 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9938 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9939 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9940 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9941 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9942 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9943 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9944 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9945 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9946 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9947 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9948 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9949 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9950 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9951
9952 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9953 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9954 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9955 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9956 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9957 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9958 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9959 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9960 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9961 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9962 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9963 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9964 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9965 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9966 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9967 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9968
9969 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9970 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9971 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9972 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9973 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9974 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9975 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9976 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9977 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9978 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9979 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9980 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9981 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9982 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9983 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9984 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9985
9986 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9987 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9988 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9989 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9990 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9991 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9992 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9993 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9994 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9995 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9996 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9997 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9998 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9999 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10000 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10001 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
10002
10003 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
10004 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
10005 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
10006 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
10007 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10008 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10009 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10010 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10011
10012 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10013 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10014 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10015 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10016 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
10017 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
10018 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
10019 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
10020
10021 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
10022 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
10023 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
10024 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
10025 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
10026 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
10027 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
10028 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
10029 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
10030 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
10031 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
10032 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
10033 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
10034 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
10035 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
10036 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
10037
10038 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
10039 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
10040 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
10041 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
10042 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
10043 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
10044 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
10045 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
10046 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
10047 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
10048 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
10049 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
10050 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
10051 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
10052 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
10053 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
10054
10055 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
10056 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
10057 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
10058 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
10059 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
10060 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
10061 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
10062 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
10063 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
10064 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
10065 /* 0xaa */ IEMOP_X4(iemOp_rsm),
10066 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
10067 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
10068 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
10069 /* 0xae */ IEMOP_X4(iemOp_Grp15),
10070 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
10071
10072 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
10073 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
10074 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
10075 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
10076 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
10077 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
10078 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
10079 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
10080 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
10081 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
10082 /* 0xba */ IEMOP_X4(iemOp_Grp8),
10083 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
10084 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
10085 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
10086 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
10087 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
10088
10089 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
10090 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
10091 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
10092 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10093 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10094 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10095 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
10096 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
10097 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
10098 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
10099 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
10100 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
10101 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
10102 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
10103 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
10104 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
10105
10106 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
10107 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10108 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10109 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10110 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10111 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10112 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
10113 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10114 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10115 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10116 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10117 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10118 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10119 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10120 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10121 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10122
10123 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10124 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10125 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10126 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10127 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10128 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10129 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
10130 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10131 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10132 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10133 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10134 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10135 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10136 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10137 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10138 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10139
10140 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
10141 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10142 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10143 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10144 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10145 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10146 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10147 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10148 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10149 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10150 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10151 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10152 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10153 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10154 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
10155 /* 0xff */ IEMOP_X4(iemOp_ud0),
10156};
10157AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
10158
10159/** @} */
10160