VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 72484

Last change on this file since 72484 was 72468, checked in by vboxsync, 7 years ago

IEM: Missing IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() for svm instructions. Added comment to vmcall and vmmcall explaining why no CPU features are checked when decoding. bugref:9044

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 337.0 KB
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72468 2018-06-07 09:38:33Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Common worker for group 6 verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
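
/*
 * Illustrative sketch (not part of the build) of how the dispatch above
 * slices a ModR/M byte.  Plain shifts and masks stand in for the
 * X86_MODRM_* macros, assuming the architectural mod(7:6)/reg(5:3)/rm(2:0)
 * layout; the helper name is hypothetical.
 */
#if 0
# include <stdint.h>
static void grp6DecodeExample(void)
{
    uint8_t const bRm  = 0xd8;              /* example ModR/M byte following 0f 00 */
    uint8_t const iMod = bRm >> 6;          /* 3 = register operand form */
    uint8_t const iReg = (bRm >> 3) & 7;    /* 3 -> g_apfnGroup6[3] = iemOp_Grp6_ltr */
    uint8_t const iRm  = bRm & 7;           /* 0 = operand register index */
    (void)iMod; (void)iReg; (void)iRm;      /* the table is indexed by iReg only;
                                               each handler checks iMod itself */
}
#endif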
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 /0. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_MNEMONIC(vmcall, "vmcall");
306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
307
308 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
309 want all hypercalls regardless of the instruction used; if a
310 hypercall isn't handled by GIM or HMSvm, a #UD will be raised.
311 (NEM/win makes ASSUMPTIONS about this behavior.) */
312 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
313}
314
315
316/** Opcode 0x0f 0x01 /0. */
317FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
318{
319 IEMOP_BITCH_ABOUT_STUB();
320 return IEMOP_RAISE_INVALID_OPCODE();
321}
322
323
324/** Opcode 0x0f 0x01 /0. */
325FNIEMOP_DEF(iemOp_Grp7_vmresume)
326{
327 IEMOP_BITCH_ABOUT_STUB();
328 return IEMOP_RAISE_INVALID_OPCODE();
329}
330
331
332/** Opcode 0x0f 0x01 /0. */
333FNIEMOP_DEF(iemOp_Grp7_vmxoff)
334{
335 IEMOP_BITCH_ABOUT_STUB();
336 return IEMOP_RAISE_INVALID_OPCODE();
337}
338
339
340/** Opcode 0x0f 0x01 /1. */
341FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
342{
343 IEMOP_MNEMONIC(sidt, "sidt Ms");
344 IEMOP_HLP_MIN_286();
345 IEMOP_HLP_64BIT_OP_SIZE();
346 IEM_MC_BEGIN(2, 1);
347 IEM_MC_ARG(uint8_t, iEffSeg, 0);
348 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
351 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
352 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
353 IEM_MC_END();
354 return VINF_SUCCESS;
355}
356
357
358/** Opcode 0x0f 0x01 /1. */
359FNIEMOP_DEF(iemOp_Grp7_monitor)
360{
361 IEMOP_MNEMONIC(monitor, "monitor");
362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
363 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
364}
365
366
367/** Opcode 0x0f 0x01 /1. */
368FNIEMOP_DEF(iemOp_Grp7_mwait)
369{
370 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
372 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
373}
374
375
376/** Opcode 0x0f 0x01 /2. */
377FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
378{
379 IEMOP_MNEMONIC(lgdt, "lgdt");
380 IEMOP_HLP_64BIT_OP_SIZE();
381 IEM_MC_BEGIN(3, 1);
382 IEM_MC_ARG(uint8_t, iEffSeg, 0);
383 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
384 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
387 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
388 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
389 IEM_MC_END();
390 return VINF_SUCCESS;
391}
392
393
394/** Opcode 0x0f 0x01 0xd0. */
395FNIEMOP_DEF(iemOp_Grp7_xgetbv)
396{
397 IEMOP_MNEMONIC(xgetbv, "xgetbv");
398 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
399 {
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
401 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
402 }
403 return IEMOP_RAISE_INVALID_OPCODE();
404}
405
406
407/** Opcode 0x0f 0x01 0xd1. */
408FNIEMOP_DEF(iemOp_Grp7_xsetbv)
409{
410 IEMOP_MNEMONIC(xsetbv, "xsetbv");
411 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
412 {
413 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
414 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
415 }
416 return IEMOP_RAISE_INVALID_OPCODE();
417}
418
419
420/** Opcode 0x0f 0x01 /3. */
421FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
422{
423 IEMOP_MNEMONIC(lidt, "lidt");
424 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
425 ? IEMMODE_64BIT
426 : pVCpu->iem.s.enmEffOpSize;
427 IEM_MC_BEGIN(3, 1);
428 IEM_MC_ARG(uint8_t, iEffSeg, 0);
429 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
430 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
433 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
434 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
435 IEM_MC_END();
436 return VINF_SUCCESS;
437}
438
439
440/** Opcode 0x0f 0x01 0xd8. */
441#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
442FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
443{
444 IEMOP_MNEMONIC(vmrun, "vmrun");
445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
447}
448#else
449FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
450#endif
451
452/** Opcode 0x0f 0x01 0xd9. */
453FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
454{
455 IEMOP_MNEMONIC(vmmcall, "vmmcall");
456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
457
458 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
459 want all hypercalls regardless of the instruction used; if a
460 hypercall isn't handled by GIM or HMSvm, a #UD will be raised.
461 (NEM/win makes ASSUMPTIONS about this behavior.) */
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
463}
464
465/** Opcode 0x0f 0x01 0xda. */
466#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
467FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
468{
469 IEMOP_MNEMONIC(vmload, "vmload");
470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
471 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
472}
473#else
474FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
475#endif
476
477
478/** Opcode 0x0f 0x01 0xdb. */
479#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
480FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
481{
482 IEMOP_MNEMONIC(vmsave, "vmsave");
483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
484 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
485}
486#else
487FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
488#endif
489
490
491/** Opcode 0x0f 0x01 0xdc. */
492#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
493FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
494{
495 IEMOP_MNEMONIC(stgi, "stgi");
496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
497 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
498}
499#else
500FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
501#endif
502
503
504/** Opcode 0x0f 0x01 0xdd. */
505#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
506FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
507{
508 IEMOP_MNEMONIC(clgi, "clgi");
509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
510 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
511}
512#else
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514#endif
515
516
517/** Opcode 0x0f 0x01 0xdf. */
518#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
519FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
520{
521 IEMOP_MNEMONIC(invlpga, "invlpga");
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
523 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
524}
525#else
526FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
527#endif
528
529
530/** Opcode 0x0f 0x01 0xde. */
531#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
532FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
533{
534 IEMOP_MNEMONIC(skinit, "skinit");
535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
536 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
537}
538#else
539FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
540#endif
541
542
543/** Opcode 0x0f 0x01 /4. */
544FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
545{
546 IEMOP_MNEMONIC(smsw, "smsw");
547 IEMOP_HLP_MIN_286();
548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
549 {
550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
551 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
552 switch (pVCpu->iem.s.enmEffOpSize)
553 {
554 case IEMMODE_16BIT:
555 IEM_MC_BEGIN(0, 1);
556 IEM_MC_LOCAL(uint16_t, u16Tmp);
557 IEM_MC_FETCH_CR0_U16(u16Tmp);
558 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
559 { /* likely */ }
560 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
561 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
562 else
563 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
564 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
565 IEM_MC_ADVANCE_RIP();
566 IEM_MC_END();
567 return VINF_SUCCESS;
568
569 case IEMMODE_32BIT:
570 IEM_MC_BEGIN(0, 1);
571 IEM_MC_LOCAL(uint32_t, u32Tmp);
572 IEM_MC_FETCH_CR0_U32(u32Tmp);
573 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
574 IEM_MC_ADVANCE_RIP();
575 IEM_MC_END();
576 return VINF_SUCCESS;
577
578 case IEMMODE_64BIT:
579 IEM_MC_BEGIN(0, 1);
580 IEM_MC_LOCAL(uint64_t, u64Tmp);
581 IEM_MC_FETCH_CR0_U64(u64Tmp);
582 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
583 IEM_MC_ADVANCE_RIP();
584 IEM_MC_END();
585 return VINF_SUCCESS;
586
587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
588 }
589 }
590 else
591 {
592 /* Ignore operand size here, memory refs are always 16-bit. */
593 IEM_MC_BEGIN(0, 2);
594 IEM_MC_LOCAL(uint16_t, u16Tmp);
595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
598 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
599 IEM_MC_FETCH_CR0_U16(u16Tmp);
600 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
601 { /* likely */ }
602 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
603 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
604 else
605 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
606 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
607 IEM_MC_ADVANCE_RIP();
608 IEM_MC_END();
609 return VINF_SUCCESS;
610 }
611}
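
/*
 * Illustrative sketch (not part of the build) of the target-CPU dependent OR
 * masks in the 16-bit SMSW paths above.  The mask values come straight from
 * the code; the per-CPU rationale in the comments is an assumption, and the
 * helper name and its plain-integer CPU parameter are hypothetical.
 */
#if 0
# include <stdint.h>
static uint16_t smswMaskExample(uint16_t u16Cr0, unsigned uTargetCpu)
{
    if (uTargetCpu > 386)
        return u16Cr0;                      /* 486+: MSW stored as-is */
    if (uTargetCpu == 386)
        return u16Cr0 | UINT16_C(0xffe0);   /* 386: bit 4 (CR0.ET) is implemented */
    return u16Cr0 | UINT16_C(0xfff0);       /* 286: bits 4..15 read as ones */
}
#endif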
612
613
614/** Opcode 0x0f 0x01 /6. */
615FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
616{
617 /* The operand size is effectively ignored, all is 16-bit and only the
618 lower four bits (CR0.PE/MP/EM/TS) are used. */
619 IEMOP_MNEMONIC(lmsw, "lmsw");
620 IEMOP_HLP_MIN_286();
621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
622 {
623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
624 IEM_MC_BEGIN(1, 0);
625 IEM_MC_ARG(uint16_t, u16Tmp, 0);
626 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
627 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
628 IEM_MC_END();
629 }
630 else
631 {
632 IEM_MC_BEGIN(1, 1);
633 IEM_MC_ARG(uint16_t, u16Tmp, 0);
634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
637 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
638 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
639 IEM_MC_END();
640 }
641 return VINF_SUCCESS;
642}
643
644
645/** Opcode 0x0f 0x01 /7. */
646FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
647{
648 IEMOP_MNEMONIC(invlpg, "invlpg");
649 IEMOP_HLP_MIN_486();
650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
651 IEM_MC_BEGIN(1, 1);
652 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
654 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
655 IEM_MC_END();
656 return VINF_SUCCESS;
657}
658
659
660/** Opcode 0x0f 0x01 /7. */
661FNIEMOP_DEF(iemOp_Grp7_swapgs)
662{
663 IEMOP_MNEMONIC(swapgs, "swapgs");
664 IEMOP_HLP_ONLY_64BIT();
665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
666 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
667}
668
669
670/** Opcode 0x0f 0x01 /7. */
671FNIEMOP_DEF(iemOp_Grp7_rdtscp)
672{
673 IEMOP_MNEMONIC(rdtscp, "rdtscp");
674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
675 /** @todo SVM intercept removal from here. */
676 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
677 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
678}
679
680
681/**
682 * Group 7 jump table, memory variant.
683 */
684IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
685{
686 iemOp_Grp7_sgdt,
687 iemOp_Grp7_sidt,
688 iemOp_Grp7_lgdt,
689 iemOp_Grp7_lidt,
690 iemOp_Grp7_smsw,
691 iemOp_InvalidWithRM,
692 iemOp_Grp7_lmsw,
693 iemOp_Grp7_invlpg
694};
695
696
697/** Opcode 0x0f 0x01. */
698FNIEMOP_DEF(iemOp_Grp7)
699{
700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
701 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
702 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
703
704 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
705 {
706 case 0:
707 switch (bRm & X86_MODRM_RM_MASK)
708 {
709 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
710 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
711 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
712 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
713 }
714 return IEMOP_RAISE_INVALID_OPCODE();
715
716 case 1:
717 switch (bRm & X86_MODRM_RM_MASK)
718 {
719 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
720 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
721 }
722 return IEMOP_RAISE_INVALID_OPCODE();
723
724 case 2:
725 switch (bRm & X86_MODRM_RM_MASK)
726 {
727 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
728 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
729 }
730 return IEMOP_RAISE_INVALID_OPCODE();
731
732 case 3:
733 switch (bRm & X86_MODRM_RM_MASK)
734 {
735 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
736 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
737 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
738 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
739 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
740 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
741 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
742 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
744 }
745
746 case 4:
747 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
748
749 case 5:
750 return IEMOP_RAISE_INVALID_OPCODE();
751
752 case 6:
753 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
754
755 case 7:
756 switch (bRm & X86_MODRM_RM_MASK)
757 {
758 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
759 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
760 }
761 return IEMOP_RAISE_INVALID_OPCODE();
762
763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
764 }
765}
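
/*
 * Illustrative sketch (not part of the build) of the two-level dispatch
 * above: memory forms (mod != 3) go through g_apfnGroup7Mem indexed by reg,
 * while register forms switch on reg and then rm.  Plain shifts/masks stand
 * in for the X86_MODRM_* macros; the helper name is hypothetical.
 */
#if 0
# include <stdint.h>
static void grp7DecodeExample(void)
{
    uint8_t const bRm  = 0xf8;              /* ModR/M byte following 0f 01 */
    uint8_t const iMod = bRm >> 6;          /* 3 -> register form, skips the mem table */
    uint8_t const iReg = (bRm >> 3) & 7;    /* 7 */
    uint8_t const iRm  = bRm & 7;           /* 0 -> case 7/0 above: iemOp_Grp7_swapgs */
    (void)iMod; (void)iReg; (void)iRm;
}
#endif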
766
767/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
768FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
769{
770 IEMOP_HLP_NO_REAL_OR_V86_MODE();
771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
772
773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
774 {
775 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
776 switch (pVCpu->iem.s.enmEffOpSize)
777 {
778 case IEMMODE_16BIT:
779 {
780 IEM_MC_BEGIN(3, 0);
781 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
782 IEM_MC_ARG(uint16_t, u16Sel, 1);
783 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
784
785 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
786 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
787 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
788
789 IEM_MC_END();
790 return VINF_SUCCESS;
791 }
792
793 case IEMMODE_32BIT:
794 case IEMMODE_64BIT:
795 {
796 IEM_MC_BEGIN(3, 0);
797 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
798 IEM_MC_ARG(uint16_t, u16Sel, 1);
799 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
800
801 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
802 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
804
805 IEM_MC_END();
806 return VINF_SUCCESS;
807 }
808
809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
810 }
811 }
812 else
813 {
814 switch (pVCpu->iem.s.enmEffOpSize)
815 {
816 case IEMMODE_16BIT:
817 {
818 IEM_MC_BEGIN(3, 1);
819 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
820 IEM_MC_ARG(uint16_t, u16Sel, 1);
821 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
823
824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
825 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
826
827 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
828 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
829 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
830
831 IEM_MC_END();
832 return VINF_SUCCESS;
833 }
834
835 case IEMMODE_32BIT:
836 case IEMMODE_64BIT:
837 {
838 IEM_MC_BEGIN(3, 1);
839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
840 IEM_MC_ARG(uint16_t, u16Sel, 1);
841 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
843
844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
845 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
846/** @todo testcase: make sure it's a 16-bit read. */
847
848 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
849 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
850 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
851
852 IEM_MC_END();
853 return VINF_SUCCESS;
854 }
855
856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
857 }
858 }
859}
860
861
862
863/** Opcode 0x0f 0x02. */
864FNIEMOP_DEF(iemOp_lar_Gv_Ew)
865{
866 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
867 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
868}
869
870
871/** Opcode 0x0f 0x03. */
872FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
873{
874 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
875 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
876}
877
878
879/** Opcode 0x0f 0x05. */
880FNIEMOP_DEF(iemOp_syscall)
881{
882 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
885}
886
887
888/** Opcode 0x0f 0x06. */
889FNIEMOP_DEF(iemOp_clts)
890{
891 IEMOP_MNEMONIC(clts, "clts");
892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
893 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
894}
895
896
897/** Opcode 0x0f 0x07. */
898FNIEMOP_DEF(iemOp_sysret)
899{
900 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
902 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
903}
904
905
906/** Opcode 0x0f 0x08. */
907FNIEMOP_DEF(iemOp_invd)
908{
909 IEMOP_MNEMONIC(invd, "invd");
910#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
911 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
912 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
913#else
914 RT_NOREF_PV(pVCpu);
915#endif
916 /** @todo implement invd for the regular case (above only handles nested SVM
917 * exits). */
918 IEMOP_BITCH_ABOUT_STUB();
919 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
920}
921
923
924
925/** Opcode 0x0f 0x09. */
926FNIEMOP_DEF(iemOp_wbinvd)
927{
928 IEMOP_MNEMONIC(wbinvd, "wbinvd");
929 IEMOP_HLP_MIN_486();
930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
931 IEM_MC_BEGIN(0, 0);
932 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
933 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
934 IEM_MC_ADVANCE_RIP();
935 IEM_MC_END();
936 return VINF_SUCCESS; /* ignore for now */
937}
938
939
940/** Opcode 0x0f 0x0b. */
941FNIEMOP_DEF(iemOp_ud2)
942{
943 IEMOP_MNEMONIC(ud2, "ud2");
944 return IEMOP_RAISE_INVALID_OPCODE();
945}
946
947/** Opcode 0x0f 0x0d. */
948FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
949{
950 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
951 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
952 {
953 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
954 return IEMOP_RAISE_INVALID_OPCODE();
955 }
956
957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
959 {
960 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
961 return IEMOP_RAISE_INVALID_OPCODE();
962 }
963
964 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
965 {
966 case 2: /* Aliased to /0 for the time being. */
967 case 4: /* Aliased to /0 for the time being. */
968 case 5: /* Aliased to /0 for the time being. */
969 case 6: /* Aliased to /0 for the time being. */
970 case 7: /* Aliased to /0 for the time being. */
971 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
972 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
973 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
975 }
976
977 IEM_MC_BEGIN(0, 1);
978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
981 /* Currently a NOP. */
982 NOREF(GCPtrEffSrc);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986}
987
988
989/** Opcode 0x0f 0x0e. */
990FNIEMOP_DEF(iemOp_femms)
991{
992 IEMOP_MNEMONIC(femms, "femms");
993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
994
995 IEM_MC_BEGIN(0,0);
996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
998 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
999 IEM_MC_FPU_FROM_MMX_MODE();
1000 IEM_MC_ADVANCE_RIP();
1001 IEM_MC_END();
1002 return VINF_SUCCESS;
1003}
1004
1005
1006/** Opcode 0x0f 0x0f. */
1007FNIEMOP_DEF(iemOp_3Dnow)
1008{
1009 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1010 {
1011 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1012 return IEMOP_RAISE_INVALID_OPCODE();
1013 }
1014
1015#ifdef IEM_WITH_3DNOW
1016 /* This is pretty sparse, use switch instead of table. */
1017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1018 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1019#else
1020 IEMOP_BITCH_ABOUT_STUB();
1021 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1022#endif
1023}
1024
1025
1026/**
1027 * @opcode 0x10
1028 * @oppfx none
1029 * @opcpuid sse
1030 * @opgroup og_sse_simdfp_datamove
1031 * @opxcpttype 4UA
1032 * @optest op1=1 op2=2 -> op1=2
1033 * @optest op1=0 op2=-22 -> op1=-22
1034 */
1035FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1036{
1037 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1039 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1040 {
1041 /*
1042 * Register, register.
1043 */
1044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1045 IEM_MC_BEGIN(0, 0);
1046 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1047 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1048 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1049 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1050 IEM_MC_ADVANCE_RIP();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * Memory, register.
1057 */
1058 IEM_MC_BEGIN(0, 2);
1059 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1066
1067 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1068 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP();
1071 IEM_MC_END();
1072 }
1073 return VINF_SUCCESS;
1074
1075}
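
/*
 * Illustrative sketch (not part of the build) of how the XMM register
 * indices above are formed.  It assumes uRexReg/uRexB hold the REX.R/REX.B
 * bits pre-shifted to bit 3, extending the 3-bit ModR/M fields to reach
 * xmm8-xmm15 in long mode; the helper name is hypothetical.
 */
#if 0
# include <stdint.h>
static void xregIndexExample(void)
{
    uint8_t const bRm      = 0xc1;                         /* mod=3, reg=0, rm=1 */
    uint8_t const uRexReg  = 8;                            /* REX.R set, pre-shifted */
    uint8_t const uRexB    = 0;                            /* REX.B clear */
    uint8_t const iXregDst = (((bRm >> 3) & 7) | uRexReg); /* xmm8 */
    uint8_t const iXregSrc = ((bRm & 7) | uRexB);          /* xmm1 */
    (void)iXregDst; (void)iXregSrc;
}
#endif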
1076
1077
1078/**
1079 * @opcode 0x10
1080 * @oppfx 0x66
1081 * @opcpuid sse2
1082 * @opgroup og_sse2_pcksclr_datamove
1083 * @opxcpttype 4UA
1084 * @optest op1=1 op2=2 -> op1=2
1085 * @optest op1=0 op2=-42 -> op1=-42
1086 */
1087FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1088{
1089 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1092 {
1093 /*
1094 * Register, register.
1095 */
1096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1097 IEM_MC_BEGIN(0, 0);
1098 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1100 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1101 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1102 IEM_MC_ADVANCE_RIP();
1103 IEM_MC_END();
1104 }
1105 else
1106 {
1107 /*
1108 * Memory, register.
1109 */
1110 IEM_MC_BEGIN(0, 2);
1111 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1113
1114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1116 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1117 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1118
1119 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1120 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 return VINF_SUCCESS;
1126}
1127
1128
1129/**
1130 * @opcode 0x10
1131 * @oppfx 0xf3
1132 * @opcpuid sse
1133 * @opgroup og_sse_simdfp_datamove
1134 * @opxcpttype 5
1135 * @optest op1=1 op2=2 -> op1=2
1136 * @optest op1=0 op2=-22 -> op1=-22
1137 */
1138FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1139{
1140 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1143 {
1144 /*
1145 * Register, register.
1146 */
1147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1148 IEM_MC_BEGIN(0, 1);
1149 IEM_MC_LOCAL(uint32_t, uSrc);
1150
1151 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1152 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1153 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1154 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1155
1156 IEM_MC_ADVANCE_RIP();
1157 IEM_MC_END();
1158 }
1159 else
1160 {
1161 /*
1162 * Memory, register.
1163 */
1164 IEM_MC_BEGIN(0, 2);
1165 IEM_MC_LOCAL(uint32_t, uSrc);
1166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1167
1168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1170 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1172
1173 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1174 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 return VINF_SUCCESS;
1180}
1181
1182
1183/**
1184 * @opcode 0x10
1185 * @oppfx 0xf2
1186 * @opcpuid sse2
1187 * @opgroup og_sse2_pcksclr_datamove
1188 * @opxcpttype 5
1189 * @optest op1=1 op2=2 -> op1=2
1190 * @optest op1=0 op2=-42 -> op1=-42
1191 */
1192FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1193{
1194 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1197 {
1198 /*
1199 * Register, register.
1200 */
1201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1202 IEM_MC_BEGIN(0, 1);
1203 IEM_MC_LOCAL(uint64_t, uSrc);
1204
1205 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1206 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1207 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1208 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1209
1210 IEM_MC_ADVANCE_RIP();
1211 IEM_MC_END();
1212 }
1213 else
1214 {
1215 /*
1216 * Memory, register.
1217 */
1218 IEM_MC_BEGIN(0, 2);
1219 IEM_MC_LOCAL(uint64_t, uSrc);
1220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1221
1222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1225 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1226
1227 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1228 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1229
1230 IEM_MC_ADVANCE_RIP();
1231 IEM_MC_END();
1232 }
1233 return VINF_SUCCESS;
1234}
1235
1236
1237/**
1238 * @opcode 0x11
1239 * @oppfx none
1240 * @opcpuid sse
1241 * @opgroup og_sse_simdfp_datamove
1242 * @opxcpttype 4UA
1243 * @optest op1=1 op2=2 -> op1=2
1244 * @optest op1=0 op2=-42 -> op1=-42
1245 */
1246FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1247{
1248 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1251 {
1252 /*
1253 * Register, register.
1254 */
1255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1256 IEM_MC_BEGIN(0, 0);
1257 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1258 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1259 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1260 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1261 IEM_MC_ADVANCE_RIP();
1262 IEM_MC_END();
1263 }
1264 else
1265 {
1266 /*
1267 * Memory, register.
1268 */
1269 IEM_MC_BEGIN(0, 2);
1270 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1272
1273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1275 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1277
1278 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1279 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1280
1281 IEM_MC_ADVANCE_RIP();
1282 IEM_MC_END();
1283 }
1284 return VINF_SUCCESS;
1285}
1286
1287
1288/**
1289 * @opcode 0x11
1290 * @oppfx 0x66
1291 * @opcpuid sse2
1292 * @opgroup og_sse2_pcksclr_datamove
1293 * @opxcpttype 4UA
1294 * @optest op1=1 op2=2 -> op1=2
1295 * @optest op1=0 op2=-42 -> op1=-42
1296 */
1297FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1298{
1299 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1302 {
1303 /*
1304 * Register, register.
1305 */
1306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1307 IEM_MC_BEGIN(0, 0);
1308 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1309 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1310 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1311 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1312 IEM_MC_ADVANCE_RIP();
1313 IEM_MC_END();
1314 }
1315 else
1316 {
1317 /*
1318 * Memory, register.
1319 */
1320 IEM_MC_BEGIN(0, 2);
1321 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1323
1324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1326 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1328
1329 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1330 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1331
1332 IEM_MC_ADVANCE_RIP();
1333 IEM_MC_END();
1334 }
1335 return VINF_SUCCESS;
1336}
1337
1338
1339/**
1340 * @opcode 0x11
1341 * @oppfx 0xf3
1342 * @opcpuid sse
1343 * @opgroup og_sse_simdfp_datamove
1344 * @opxcpttype 5
1345 * @optest op1=1 op2=2 -> op1=2
1346 * @optest op1=0 op2=-22 -> op1=-22
1347 */
1348FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1349{
1350 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1353 {
1354 /*
1355 * Register, register.
1356 */
1357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1358 IEM_MC_BEGIN(0, 1);
1359 IEM_MC_LOCAL(uint32_t, uSrc);
1360
1361 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1362 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1363 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1364 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1365
1366 IEM_MC_ADVANCE_RIP();
1367 IEM_MC_END();
1368 }
1369 else
1370 {
1371 /*
1372 * Memory, register.
1373 */
1374 IEM_MC_BEGIN(0, 2);
1375 IEM_MC_LOCAL(uint32_t, uSrc);
1376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1377
1378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1380 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1381 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1382
1383 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1384 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1385
1386 IEM_MC_ADVANCE_RIP();
1387 IEM_MC_END();
1388 }
1389 return VINF_SUCCESS;
1390}
1391
1392
1393/**
1394 * @opcode 0x11
1395 * @oppfx 0xf2
1396 * @opcpuid sse2
1397 * @opgroup og_sse2_pcksclr_datamove
1398 * @opxcpttype 5
1399 * @optest op1=1 op2=2 -> op1=2
1400 * @optest op1=0 op2=-42 -> op1=-42
1401 */
1402FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1403{
1404 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1407 {
1408 /*
1409 * Register, register.
1410 */
1411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1412 IEM_MC_BEGIN(0, 1);
1413 IEM_MC_LOCAL(uint64_t, uSrc);
1414
1415 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1417 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1418 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1419
1420 IEM_MC_ADVANCE_RIP();
1421 IEM_MC_END();
1422 }
1423 else
1424 {
1425 /*
1426 * Memory, register.
1427 */
1428 IEM_MC_BEGIN(0, 2);
1429 IEM_MC_LOCAL(uint64_t, uSrc);
1430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1431
1432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1434 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1436
1437 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1438 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1439
1440 IEM_MC_ADVANCE_RIP();
1441 IEM_MC_END();
1442 }
1443 return VINF_SUCCESS;
1444}
1445
1446
1447FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1448{
1449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1450 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1451 {
1452 /**
1453 * @opcode 0x12
1454 * @opcodesub 11 mr/reg
1455 * @oppfx none
1456 * @opcpuid sse
1457 * @opgroup og_sse_simdfp_datamove
1458 * @opxcpttype 5
1459 * @optest op1=1 op2=2 -> op1=2
1460 * @optest op1=0 op2=-42 -> op1=-42
1461 */
1462 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1463
1464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1465 IEM_MC_BEGIN(0, 1);
1466 IEM_MC_LOCAL(uint64_t, uSrc);
1467
1468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1470 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1471 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1472
1473 IEM_MC_ADVANCE_RIP();
1474 IEM_MC_END();
1475 }
1476 else
1477 {
1478 /**
1479 * @opdone
1480 * @opcode 0x12
1481 * @opcodesub !11 mr/reg
1482 * @oppfx none
1483 * @opcpuid sse
1484 * @opgroup og_sse_simdfp_datamove
1485 * @opxcpttype 5
1486 * @optest op1=1 op2=2 -> op1=2
1487 * @optest op1=0 op2=-42 -> op1=-42
1488 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1489 */
1490 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1491
1492 IEM_MC_BEGIN(0, 2);
1493 IEM_MC_LOCAL(uint64_t, uSrc);
1494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1495
1496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1498 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1500
1501 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1502 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1503
1504 IEM_MC_ADVANCE_RIP();
1505 IEM_MC_END();
1506 }
1507 return VINF_SUCCESS;
1508}
1509
1510
1511/**
1512 * @opcode 0x12
1513 * @opcodesub !11 mr/reg
1514 * @oppfx 0x66
1515 * @opcpuid sse2
1516 * @opgroup og_sse2_pcksclr_datamove
1517 * @opxcpttype 5
1518 * @optest op1=1 op2=2 -> op1=2
1519 * @optest op1=0 op2=-42 -> op1=-42
1520 */
1521FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1522{
1523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1524 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1525 {
1526 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1527
1528 IEM_MC_BEGIN(0, 2);
1529 IEM_MC_LOCAL(uint64_t, uSrc);
1530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1531
1532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1534 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1535 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1536
1537 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1539
1540 IEM_MC_ADVANCE_RIP();
1541 IEM_MC_END();
1542 return VINF_SUCCESS;
1543 }
1544
1545 /**
1546 * @opdone
1547 * @opmnemonic ud660f12m3
1548 * @opcode 0x12
1549 * @opcodesub 11 mr/reg
1550 * @oppfx 0x66
1551 * @opunused immediate
1552 * @opcpuid sse
1553 * @optest ->
1554 */
1555 return IEMOP_RAISE_INVALID_OPCODE();
1556}
1557
1558
1559/**
1560 * @opcode 0x12
1561 * @oppfx 0xf3
1562 * @opcpuid sse3
1563 * @opgroup og_sse3_pcksclr_datamove
1564 * @opxcpttype 4
1565 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1566 * op1=0x00000002000000020000000100000001
1567 */
1568FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1569{
1570 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1573 {
1574 /*
1575 * Register, register.
1576 */
1577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1578 IEM_MC_BEGIN(2, 0);
1579 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1580 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1581
1582 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1583 IEM_MC_PREPARE_SSE_USAGE();
1584
1585 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1586 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1587 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1588
1589 IEM_MC_ADVANCE_RIP();
1590 IEM_MC_END();
1591 }
1592 else
1593 {
1594 /*
1595 * Register, memory.
1596 */
1597 IEM_MC_BEGIN(2, 2);
1598 IEM_MC_LOCAL(RTUINT128U, uSrc);
1599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1600 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1601 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1602
1603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1605 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1606 IEM_MC_PREPARE_SSE_USAGE();
1607
1608 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1609 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1610 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1611
1612 IEM_MC_ADVANCE_RIP();
1613 IEM_MC_END();
1614 }
1615 return VINF_SUCCESS;
1616}
1617
1618
1619/**
1620 * @opcode 0x12
1621 * @oppfx 0xf2
1622 * @opcpuid sse3
1623 * @opgroup og_sse3_pcksclr_datamove
1624 * @opxcpttype 5
1625 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1626 * op1=0x22222222111111112222222211111111
1627 */
1628FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1629{
1630 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1633 {
1634 /*
1635 * Register, register.
1636 */
1637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1638 IEM_MC_BEGIN(2, 0);
1639 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1640 IEM_MC_ARG(uint64_t, uSrc, 1);
1641
1642 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1643 IEM_MC_PREPARE_SSE_USAGE();
1644
1645 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1646 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1647 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1648
1649 IEM_MC_ADVANCE_RIP();
1650 IEM_MC_END();
1651 }
1652 else
1653 {
1654 /*
1655 * Register, memory.
1656 */
1657 IEM_MC_BEGIN(2, 2);
1658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1659 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1660 IEM_MC_ARG(uint64_t, uSrc, 1);
1661
1662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1664 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1665 IEM_MC_PREPARE_SSE_USAGE();
1666
1667 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1668 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
1676
1677
1678/**
1679 * @opcode 0x13
1680 * @opcodesub !11 mr/reg
1681 * @oppfx none
1682 * @opcpuid sse
1683 * @opgroup og_sse_simdfp_datamove
1684 * @opxcpttype 5
1685 * @optest op1=1 op2=2 -> op1=2
1686 * @optest op1=0 op2=-42 -> op1=-42
1687 */
1688FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1689{
1690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1691 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1692 {
1693 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1694
1695 IEM_MC_BEGIN(0, 2);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1698
1699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1701 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1702 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1703
1704 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1706
1707 IEM_MC_ADVANCE_RIP();
1708 IEM_MC_END();
1709 return VINF_SUCCESS;
1710 }
1711
1712 /**
1713 * @opdone
1714 * @opmnemonic ud0f13m3
1715 * @opcode 0x13
1716 * @opcodesub 11 mr/reg
1717 * @oppfx none
1718 * @opunused immediate
1719 * @opcpuid sse
1720 * @optest ->
1721 */
1722 return IEMOP_RAISE_INVALID_OPCODE();
1723}
1724
1725
1726/**
1727 * @opcode 0x13
1728 * @opcodesub !11 mr/reg
1729 * @oppfx 0x66
1730 * @opcpuid sse2
1731 * @opgroup og_sse2_pcksclr_datamove
1732 * @opxcpttype 5
1733 * @optest op1=1 op2=2 -> op1=2
1734 * @optest op1=0 op2=-42 -> op1=-42
1735 */
1736FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1737{
1738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1739 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1740 {
1741 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1742 IEM_MC_BEGIN(0, 2);
1743 IEM_MC_LOCAL(uint64_t, uSrc);
1744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1745
1746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1748 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1749 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1750
1751 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1752 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1753
1754 IEM_MC_ADVANCE_RIP();
1755 IEM_MC_END();
1756 return VINF_SUCCESS;
1757 }
1758
1759 /**
1760 * @opdone
1761 * @opmnemonic ud660f13m3
1762 * @opcode 0x13
1763 * @opcodesub 11 mr/reg
1764 * @oppfx 0x66
1765 * @opunused immediate
1766 * @opcpuid sse
1767 * @optest ->
1768 */
1769 return IEMOP_RAISE_INVALID_OPCODE();
1770}
1771
1772
1773/**
1774 * @opmnemonic udf30f13
1775 * @opcode 0x13
1776 * @oppfx 0xf3
1777 * @opunused intel-modrm
1778 * @opcpuid sse
1779 * @optest ->
1780 * @opdone
1781 */
1782
1783/**
1784 * @opmnemonic udf20f13
1785 * @opcode 0x13
1786 * @oppfx 0xf2
1787 * @opunused intel-modrm
1788 * @opcpuid sse
1789 * @optest ->
1790 * @opdone
1791 */
1792
1793/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1794FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1795/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1796FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1797
1798/**
1799 * @opdone
1800 * @opmnemonic udf30f14
1801 * @opcode 0x14
1802 * @oppfx 0xf3
1803 * @opunused intel-modrm
1804 * @opcpuid sse
1805 * @optest ->
1806 * @opdone
1807 */
1808
1809/**
1810 * @opmnemonic udf20f14
1811 * @opcode 0x14
1812 * @oppfx 0xf2
1813 * @opunused intel-modrm
1814 * @opcpuid sse
1815 * @optest ->
1816 * @opdone
1817 */
1818
1819/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1820FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1821/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1822FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1823/* Opcode 0xf3 0x0f 0x15 - invalid */
1824/* Opcode 0xf2 0x0f 0x15 - invalid */
1825
1826/**
1827 * @opdone
1828 * @opmnemonic udf30f15
1829 * @opcode 0x15
1830 * @oppfx 0xf3
1831 * @opunused intel-modrm
1832 * @opcpuid sse
1833 * @optest ->
1834 * @opdone
1835 */
1836
1837/**
1838 * @opmnemonic udf20f15
1839 * @opcode 0x15
1840 * @oppfx 0xf2
1841 * @opunused intel-modrm
1842 * @opcpuid sse
1843 * @optest ->
1844 * @opdone
1845 */
1846
1847FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1848{
1849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1850 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1851 {
1852 /**
1853 * @opcode 0x16
1854 * @opcodesub 11 mr/reg
1855 * @oppfx none
1856 * @opcpuid sse
1857 * @opgroup og_sse_simdfp_datamove
1858 * @opxcpttype 5
1859 * @optest op1=1 op2=2 -> op1=2
1860 * @optest op1=0 op2=-42 -> op1=-42
1861 */
1862 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1863
1864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1865 IEM_MC_BEGIN(0, 1);
1866 IEM_MC_LOCAL(uint64_t, uSrc);
1867
1868 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1869 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1870 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1871 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1872
1873 IEM_MC_ADVANCE_RIP();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 /**
1879 * @opdone
1880 * @opcode 0x16
1881 * @opcodesub !11 mr/reg
1882 * @oppfx none
1883 * @opcpuid sse
1884 * @opgroup og_sse_simdfp_datamove
1885 * @opxcpttype 5
1886 * @optest op1=1 op2=2 -> op1=2
1887 * @optest op1=0 op2=-42 -> op1=-42
1888 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1889 */
1890 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1891
1892 IEM_MC_BEGIN(0, 2);
1893 IEM_MC_LOCAL(uint64_t, uSrc);
1894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1895
1896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1898 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1899 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1900
1901 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1902 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1903
1904 IEM_MC_ADVANCE_RIP();
1905 IEM_MC_END();
1906 }
1907 return VINF_SUCCESS;
1908}
1909
1910
1911/**
1912 * @opcode 0x16
1913 * @opcodesub !11 mr/reg
1914 * @oppfx 0x66
1915 * @opcpuid sse2
1916 * @opgroup og_sse2_pcksclr_datamove
1917 * @opxcpttype 5
1918 * @optest op1=1 op2=2 -> op1=2
1919 * @optest op1=0 op2=-42 -> op1=-42
1920 */
1921FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1922{
1923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1924 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1925 {
1926 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1927 IEM_MC_BEGIN(0, 2);
1928 IEM_MC_LOCAL(uint64_t, uSrc);
1929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1930
1931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1935
1936 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1937 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1938
1939 IEM_MC_ADVANCE_RIP();
1940 IEM_MC_END();
1941 return VINF_SUCCESS;
1942 }
1943
1944 /**
1945 * @opdone
1946 * @opmnemonic ud660f16m3
1947 * @opcode 0x16
1948 * @opcodesub 11 mr/reg
1949 * @oppfx 0x66
1950 * @opunused immediate
1951 * @opcpuid sse
1952 * @optest ->
1953 */
1954 return IEMOP_RAISE_INVALID_OPCODE();
1955}
1956
1957
1958/**
1959 * @opcode 0x16
1960 * @oppfx 0xf3
1961 * @opcpuid sse3
1962 * @opgroup og_sse3_pcksclr_datamove
1963 * @opxcpttype 4
1964 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1965 * op1=0x00000002000000020000000100000001
1966 */
1967FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1968{
1969 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1972 {
1973 /*
1974 * Register, register.
1975 */
1976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1977 IEM_MC_BEGIN(2, 0);
1978 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1979 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1980
1981 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1982 IEM_MC_PREPARE_SSE_USAGE();
1983
1984 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1985 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1986 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1987
1988 IEM_MC_ADVANCE_RIP();
1989 IEM_MC_END();
1990 }
1991 else
1992 {
1993 /*
1994 * Register, memory.
1995 */
1996 IEM_MC_BEGIN(2, 2);
1997 IEM_MC_LOCAL(RTUINT128U, uSrc);
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2000 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2001
2002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2004 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2005 IEM_MC_PREPARE_SSE_USAGE();
2006
2007 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2008 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 return VINF_SUCCESS;
2015}
2016
2017/**
2018 * @opdone
2019 * @opmnemonic udf20f16
2020 * @opcode 0x16
2021 * @oppfx 0xf2
2022 * @opunused intel-modrm
2023 * @opcpuid sse
2024 * @optest ->
2025 * @opdone
2026 */
2027
2028
2029/**
2030 * @opcode 0x17
2031 * @opcodesub !11 mr/reg
2032 * @oppfx none
2033 * @opcpuid sse
2034 * @opgroup og_sse_simdfp_datamove
2035 * @opxcpttype 5
2036 * @optest op1=1 op2=2 -> op1=2
2037 * @optest op1=0 op2=-42 -> op1=-42
2038 */
2039FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2040{
2041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2042 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2043 {
2044 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2045
2046 IEM_MC_BEGIN(0, 2);
2047 IEM_MC_LOCAL(uint64_t, uSrc);
2048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2049
2050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2053 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2054
2055 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2056 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2057
2058 IEM_MC_ADVANCE_RIP();
2059 IEM_MC_END();
2060 return VINF_SUCCESS;
2061 }
2062
2063 /**
2064 * @opdone
2065 * @opmnemonic ud0f17m3
2066 * @opcode 0x17
2067 * @opcodesub 11 mr/reg
2068 * @oppfx none
2069 * @opunused immediate
2070 * @opcpuid sse
2071 * @optest ->
2072 */
2073 return IEMOP_RAISE_INVALID_OPCODE();
2074}
2075
2076
2077/**
2078 * @opcode 0x17
2079 * @opcodesub !11 mr/reg
2080 * @oppfx 0x66
2081 * @opcpuid sse2
2082 * @opgroup og_sse2_pcksclr_datamove
2083 * @opxcpttype 5
2084 * @optest op1=1 op2=2 -> op1=2
2085 * @optest op1=0 op2=-42 -> op1=-42
2086 */
2087FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2088{
2089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2090 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2091 {
2092 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2093
2094 IEM_MC_BEGIN(0, 2);
2095 IEM_MC_LOCAL(uint64_t, uSrc);
2096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2097
2098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2100 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2101 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2102
2103 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2104 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2105
2106 IEM_MC_ADVANCE_RIP();
2107 IEM_MC_END();
2108 return VINF_SUCCESS;
2109 }
2110
2111 /**
2112 * @opdone
2113 * @opmnemonic ud660f17m3
2114 * @opcode 0x17
2115 * @opcodesub 11 mr/reg
2116 * @oppfx 0x66
2117 * @opunused immediate
2118 * @opcpuid sse
2119 * @optest ->
2120 */
2121 return IEMOP_RAISE_INVALID_OPCODE();
2122}
2123
2124
2125/**
2126 * @opdone
2127 * @opmnemonic udf30f17
2128 * @opcode 0x17
2129 * @oppfx 0xf3
2130 * @opunused intel-modrm
2131 * @opcpuid sse
2132 * @optest ->
2133 * @opdone
2134 */
2135
2136/**
2137 * @opmnemonic udf20f17
2138 * @opcode 0x17
2139 * @oppfx 0xf2
2140 * @opunused intel-modrm
2141 * @opcpuid sse
2142 * @optest ->
2143 * @opdone
2144 */
2145
2146
2147/** Opcode 0x0f 0x18. */
2148FNIEMOP_DEF(iemOp_prefetch_Grp16)
2149{
2150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2151 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2152 {
2153 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2154 {
2155 case 4: /* Aliased to /0 for the time being according to AMD. */
2156 case 5: /* Aliased to /0 for the time being according to AMD. */
2157 case 6: /* Aliased to /0 for the time being according to AMD. */
2158 case 7: /* Aliased to /0 for the time being according to AMD. */
2159 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2160 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2161 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2162 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2164 }
2165
2166 IEM_MC_BEGIN(0, 1);
2167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2170 /* Currently a NOP; the effective address is calculated above only to consume the ModRM operand bytes. */
2171 NOREF(GCPtrEffSrc);
2172 IEM_MC_ADVANCE_RIP();
2173 IEM_MC_END();
2174 return VINF_SUCCESS;
2175 }
2176
2177 return IEMOP_RAISE_INVALID_OPCODE();
2178}
2179
2180
2181/** Opcode 0x0f 0x19..0x1f. */
2182FNIEMOP_DEF(iemOp_nop_Ev)
2183{
2184 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2187 {
2188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2189 IEM_MC_BEGIN(0, 0);
2190 IEM_MC_ADVANCE_RIP();
2191 IEM_MC_END();
2192 }
2193 else
2194 {
2195 IEM_MC_BEGIN(0, 1);
2196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2199 /* Currently a NOP; the effective address is calculated only to consume the ModRM operand bytes. */
2200 NOREF(GCPtrEffSrc);
2201 IEM_MC_ADVANCE_RIP();
2202 IEM_MC_END();
2203 }
2204 return VINF_SUCCESS;
2205}
2206
2207
2208/** Opcode 0x0f 0x20. */
2209FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2210{
2211 /* mod is ignored, as are operand size overrides. */
2212 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2213 IEMOP_HLP_MIN_386();
2214 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2215 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2216 else
2217 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2218
2219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2220 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2221 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2222 {
2223 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
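/* (AMD's alternative CR8 encoding: the LOCK prefix bumps the CR index by 8,
so a locked MOV from CR0 reads CR8 on CPUs advertising the feature.) */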
2224 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2225 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2226 iCrReg |= 8;
2227 }
2228 switch (iCrReg)
2229 {
2230 case 0: case 2: case 3: case 4: case 8:
2231 break;
2232 default:
2233 return IEMOP_RAISE_INVALID_OPCODE();
2234 }
2235 IEMOP_HLP_DONE_DECODING();
2236
2237 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2238}
2239
2240
2241/** Opcode 0x0f 0x21. */
2242FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2243{
2244 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2245 IEMOP_HLP_MIN_386();
2246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2248 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2249 return IEMOP_RAISE_INVALID_OPCODE();
2250 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2251 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2252 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2253}
2254
2255
2256/** Opcode 0x0f 0x22. */
2257FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2258{
2259 /* mod is ignored, as are operand size overrides. */
2260 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2261 IEMOP_HLP_MIN_386();
2262 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2263 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2264 else
2265 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2266
2267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2268 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2269 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2270 {
2271 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2272 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2273 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2274 iCrReg |= 8;
2275 }
2276 switch (iCrReg)
2277 {
2278 case 0: case 2: case 3: case 4: case 8:
2279 break;
2280 default:
2281 return IEMOP_RAISE_INVALID_OPCODE();
2282 }
2283 IEMOP_HLP_DONE_DECODING();
2284
2285 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2286}
2287
2288
2289/** Opcode 0x0f 0x23. */
2290FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2291{
2292 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2293 IEMOP_HLP_MIN_386();
2294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2296 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2297 return IEMOP_RAISE_INVALID_OPCODE();
2298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2299 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2300 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2301}
2302
2303
2304/** Opcode 0x0f 0x24. */
2305FNIEMOP_DEF(iemOp_mov_Rd_Td)
2306{
2307 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2308 /** @todo works on 386 and 486. */
2309 /* The RM byte is not considered, see testcase. */
2310 return IEMOP_RAISE_INVALID_OPCODE();
2311}
2312
2313
2314/** Opcode 0x0f 0x26. */
2315FNIEMOP_DEF(iemOp_mov_Td_Rd)
2316{
2317 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2318 /** @todo works on 386 and 486. */
2319 /* The RM byte is not considered, see testcase. */
2320 return IEMOP_RAISE_INVALID_OPCODE();
2321}
2322
2323
2324/**
2325 * @opcode 0x28
2326 * @oppfx none
2327 * @opcpuid sse
2328 * @opgroup og_sse_simdfp_datamove
2329 * @opxcpttype 1
2330 * @optest op1=1 op2=2 -> op1=2
2331 * @optest op1=0 op2=-42 -> op1=-42
2332 */
2333FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2334{
2335 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2337 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2338 {
2339 /*
2340 * Register, register.
2341 */
2342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2343 IEM_MC_BEGIN(0, 0);
2344 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2345 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2346 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2347 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2348 IEM_MC_ADVANCE_RIP();
2349 IEM_MC_END();
2350 }
2351 else
2352 {
2353 /*
2354 * Register, memory.
2355 */
2356 IEM_MC_BEGIN(0, 2);
2357 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2359
2360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2362 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2363 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2364
2365 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2366 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2367
2368 IEM_MC_ADVANCE_RIP();
2369 IEM_MC_END();
2370 }
2371 return VINF_SUCCESS;
2372}
2373
2374/**
2375 * @opcode 0x28
2376 * @oppfx 66
2377 * @opcpuid sse2
2378 * @opgroup og_sse2_pcksclr_datamove
2379 * @opxcpttype 1
2380 * @optest op1=1 op2=2 -> op1=2
2381 * @optest op1=0 op2=-42 -> op1=-42
2382 */
2383FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2384{
2385 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2388 {
2389 /*
2390 * Register, register.
2391 */
2392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2393 IEM_MC_BEGIN(0, 0);
2394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2396 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2397 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2398 IEM_MC_ADVANCE_RIP();
2399 IEM_MC_END();
2400 }
2401 else
2402 {
2403 /*
2404 * Register, memory.
2405 */
2406 IEM_MC_BEGIN(0, 2);
2407 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2409
2410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2413 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2414
2415 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2416 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2417
2418 IEM_MC_ADVANCE_RIP();
2419 IEM_MC_END();
2420 }
2421 return VINF_SUCCESS;
2422}
2423
2424/* Opcode 0xf3 0x0f 0x28 - invalid */
2425/* Opcode 0xf2 0x0f 0x28 - invalid */
2426
2427/**
2428 * @opcode 0x29
2429 * @oppfx none
2430 * @opcpuid sse
2431 * @opgroup og_sse_simdfp_datamove
2432 * @opxcpttype 1
2433 * @optest op1=1 op2=2 -> op1=2
2434 * @optest op1=0 op2=-42 -> op1=-42
2435 */
2436FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2437{
2438 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2441 {
2442 /*
2443 * Register, register.
2444 */
2445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2446 IEM_MC_BEGIN(0, 0);
2447 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2449 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2450 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2451 IEM_MC_ADVANCE_RIP();
2452 IEM_MC_END();
2453 }
2454 else
2455 {
2456 /*
2457 * Memory, register.
2458 */
2459 IEM_MC_BEGIN(0, 2);
2460 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2462
2463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2465 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2467
2468 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2469 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2470
2471 IEM_MC_ADVANCE_RIP();
2472 IEM_MC_END();
2473 }
2474 return VINF_SUCCESS;
2475}
2476
2477/**
2478 * @opcode 0x29
2479 * @oppfx 66
2480 * @opcpuid sse2
2481 * @opgroup og_sse2_pcksclr_datamove
2482 * @opxcpttype 1
2483 * @optest op1=1 op2=2 -> op1=2
2484 * @optest op1=0 op2=-42 -> op1=-42
2485 */
2486FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2487{
2488 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2491 {
2492 /*
2493 * Register, register.
2494 */
2495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2496 IEM_MC_BEGIN(0, 0);
2497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2499 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2500 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2501 IEM_MC_ADVANCE_RIP();
2502 IEM_MC_END();
2503 }
2504 else
2505 {
2506 /*
2507 * Memory, register.
2508 */
2509 IEM_MC_BEGIN(0, 2);
2510 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2517
2518 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2519 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2520
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 return VINF_SUCCESS;
2525}
2526
2527/* Opcode 0xf3 0x0f 0x29 - invalid */
2528/* Opcode 0xf2 0x0f 0x29 - invalid */
2529
2530
2531/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2532FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2533/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2534FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2535 /** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2536FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2537 /** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2538FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2539
2540
2541/**
2542 * @opcode 0x2b
2543 * @opcodesub !11 mr/reg
2544 * @oppfx none
2545 * @opcpuid sse
2546 * @opgroup og_sse1_cachect
2547 * @opxcpttype 1
2548 * @optest op1=1 op2=2 -> op1=2
2549 * @optest op1=0 op2=-42 -> op1=-42
2550 */
2551FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2552{
2553 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2555 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2556 {
2557 /*
2558 * memory, register.
2559 */
2560 IEM_MC_BEGIN(0, 2);
2561 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2563
2564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2566 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2567 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2568
2569 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2570 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2571
2572 IEM_MC_ADVANCE_RIP();
2573 IEM_MC_END();
2574 }
2575 /* The register, register encoding is invalid. */
2576 else
2577 return IEMOP_RAISE_INVALID_OPCODE();
2578 return VINF_SUCCESS;
2579}
2580
2581/**
2582 * @opcode 0x2b
2583 * @opcodesub !11 mr/reg
2584 * @oppfx 0x66
2585 * @opcpuid sse2
2586 * @opgroup og_sse2_cachect
2587 * @opxcpttype 1
2588 * @optest op1=1 op2=2 -> op1=2
2589 * @optest op1=0 op2=-42 -> op1=-42
2590 */
2591FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2592{
2593 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2595 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2596 {
2597 /*
2598 * memory, register.
2599 */
2600 IEM_MC_BEGIN(0, 2);
2601 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2603
2604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2606 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2607 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2608
2609 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2610 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2611
2612 IEM_MC_ADVANCE_RIP();
2613 IEM_MC_END();
2614 }
2615 /* The register, register encoding is invalid. */
2616 else
2617 return IEMOP_RAISE_INVALID_OPCODE();
2618 return VINF_SUCCESS;
2619}
2620/* Opcode 0xf3 0x0f 0x2b - invalid */
2621/* Opcode 0xf2 0x0f 0x2b - invalid */
2622
2623
2624/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2625FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2626/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2627FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2628/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2629FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2630/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2631FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2632
2633/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2634FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2635/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2636FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2637/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2638FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2639/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2640FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2641
2642/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2643FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2644/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2645FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2646/* Opcode 0xf3 0x0f 0x2e - invalid */
2647/* Opcode 0xf2 0x0f 0x2e - invalid */
2648
2649/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2650FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2651/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2652FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2653/* Opcode 0xf3 0x0f 0x2f - invalid */
2654/* Opcode 0xf2 0x0f 0x2f - invalid */
2655
2656/** Opcode 0x0f 0x30. */
2657FNIEMOP_DEF(iemOp_wrmsr)
2658{
2659 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2661 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2662}
2663
2664
2665/** Opcode 0x0f 0x31. */
2666FNIEMOP_DEF(iemOp_rdtsc)
2667{
2668 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2670 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2671}
2672
2673
2674 /** Opcode 0x0f 0x32. */
2675FNIEMOP_DEF(iemOp_rdmsr)
2676{
2677 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2679 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2680}
2681
2682
2683 /** Opcode 0x0f 0x33. */
2684FNIEMOP_DEF(iemOp_rdpmc)
2685{
2686 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2688 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2689}
2690
2691
2692/** Opcode 0x0f 0x34. */
2693FNIEMOP_STUB(iemOp_sysenter);
2694/** Opcode 0x0f 0x35. */
2695FNIEMOP_STUB(iemOp_sysexit);
2696/** Opcode 0x0f 0x37. */
2697FNIEMOP_STUB(iemOp_getsec);
2698
2699
2700/** Opcode 0x0f 0x38. */
2701FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2702{
2703#ifdef IEM_WITH_THREE_0F_38
2704 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
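/* The table holds four entries per opcode byte, one per mandatory prefix
(idxPrefix: none, 66h, F3h, F2h). */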
2705 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2706#else
2707 IEMOP_BITCH_ABOUT_STUB();
2708 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2709#endif
2710}
2711
2712
2713/** Opcode 0x0f 0x3a. */
2714FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2715{
2716#ifdef IEM_WITH_THREE_0F_3A
2717 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2718 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2719#else
2720 IEMOP_BITCH_ABOUT_STUB();
2721 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2722#endif
2723}
2724
2725
2726/**
2727 * Implements a conditional move.
2728 *
2729 * Wish there was an obvious way to do this where we could share and reduce
2730 * code bloat.
2731 *
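* For example, iemOp_cmovo_Gv_Ev below expands
* CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)). Note that the 32-bit variants
* clear the high half of the destination register even when the condition is
* false, since 32-bit register writes always zero-extend in 64-bit mode.
*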
2732 * @param a_Cnd The conditional "microcode" operation.
2733 */
2734#define CMOV_X(a_Cnd) \
2735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2736 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2737 { \
2738 switch (pVCpu->iem.s.enmEffOpSize) \
2739 { \
2740 case IEMMODE_16BIT: \
2741 IEM_MC_BEGIN(0, 1); \
2742 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2743 a_Cnd { \
2744 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2745 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2746 } IEM_MC_ENDIF(); \
2747 IEM_MC_ADVANCE_RIP(); \
2748 IEM_MC_END(); \
2749 return VINF_SUCCESS; \
2750 \
2751 case IEMMODE_32BIT: \
2752 IEM_MC_BEGIN(0, 1); \
2753 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2754 a_Cnd { \
2755 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2756 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2757 } IEM_MC_ELSE() { \
2758 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2759 } IEM_MC_ENDIF(); \
2760 IEM_MC_ADVANCE_RIP(); \
2761 IEM_MC_END(); \
2762 return VINF_SUCCESS; \
2763 \
2764 case IEMMODE_64BIT: \
2765 IEM_MC_BEGIN(0, 1); \
2766 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2767 a_Cnd { \
2768 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2769 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2770 } IEM_MC_ENDIF(); \
2771 IEM_MC_ADVANCE_RIP(); \
2772 IEM_MC_END(); \
2773 return VINF_SUCCESS; \
2774 \
2775 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2776 } \
2777 } \
2778 else \
2779 { \
2780 switch (pVCpu->iem.s.enmEffOpSize) \
2781 { \
2782 case IEMMODE_16BIT: \
2783 IEM_MC_BEGIN(0, 2); \
2784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2785 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2787 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2788 a_Cnd { \
2789 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2790 } IEM_MC_ENDIF(); \
2791 IEM_MC_ADVANCE_RIP(); \
2792 IEM_MC_END(); \
2793 return VINF_SUCCESS; \
2794 \
2795 case IEMMODE_32BIT: \
2796 IEM_MC_BEGIN(0, 2); \
2797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2798 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2800 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2801 a_Cnd { \
2802 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2803 } IEM_MC_ELSE() { \
2804 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2805 } IEM_MC_ENDIF(); \
2806 IEM_MC_ADVANCE_RIP(); \
2807 IEM_MC_END(); \
2808 return VINF_SUCCESS; \
2809 \
2810 case IEMMODE_64BIT: \
2811 IEM_MC_BEGIN(0, 2); \
2812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2813 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2815 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2816 a_Cnd { \
2817 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2818 } IEM_MC_ENDIF(); \
2819 IEM_MC_ADVANCE_RIP(); \
2820 IEM_MC_END(); \
2821 return VINF_SUCCESS; \
2822 \
2823 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2824 } \
2825 } do {} while (0)
2826
2827
2828
2829/** Opcode 0x0f 0x40. */
2830FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2831{
2832 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2833 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2834}
2835
2836
2837/** Opcode 0x0f 0x41. */
2838FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2839{
2840 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2841 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2842}
2843
2844
2845/** Opcode 0x0f 0x42. */
2846FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2847{
2848 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2849 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2850}
2851
2852
2853/** Opcode 0x0f 0x43. */
2854FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2855{
2856 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2857 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2858}
2859
2860
2861/** Opcode 0x0f 0x44. */
2862FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2863{
2864 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2865 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2866}
2867
2868
2869/** Opcode 0x0f 0x45. */
2870FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2871{
2872 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2873 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2874}
2875
2876
2877/** Opcode 0x0f 0x46. */
2878FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2879{
2880 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2881 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2882}
2883
2884
2885/** Opcode 0x0f 0x47. */
2886FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2887{
2888 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2889 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2890}
2891
2892
2893/** Opcode 0x0f 0x48. */
2894FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2895{
2896 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2897 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2898}
2899
2900
2901/** Opcode 0x0f 0x49. */
2902FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2903{
2904 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2905 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2906}
2907
2908
2909/** Opcode 0x0f 0x4a. */
2910FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2911{
2912 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2913 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2914}
2915
2916
2917/** Opcode 0x0f 0x4b. */
2918FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2919{
2920 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2921 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2922}
2923
2924
2925/** Opcode 0x0f 0x4c. */
2926FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2927{
2928 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2929 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2930}
2931
2932
2933/** Opcode 0x0f 0x4d. */
2934FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2935{
2936 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2937 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2938}
2939
2940
2941/** Opcode 0x0f 0x4e. */
2942FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2943{
2944 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2945 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2946}
2947
2948
2949/** Opcode 0x0f 0x4f. */
2950FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2951{
2952 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2953 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2954}
2955
2956#undef CMOV_X
2957
2958/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2959FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2960/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2961FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2962/* Opcode 0xf3 0x0f 0x50 - invalid */
2963/* Opcode 0xf2 0x0f 0x50 - invalid */
2964
2965/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2966FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2967/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2968FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2969/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2970FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2971/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2972FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2973
2974/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2975FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2976/* Opcode 0x66 0x0f 0x52 - invalid */
2977/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2978FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2979/* Opcode 0xf2 0x0f 0x52 - invalid */
2980
2981/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2982FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2983/* Opcode 0x66 0x0f 0x53 - invalid */
2984/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2985FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2986/* Opcode 0xf2 0x0f 0x53 - invalid */
2987
2988/** Opcode 0x0f 0x54 - andps Vps, Wps */
2989FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2990/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2991FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2992/* Opcode 0xf3 0x0f 0x54 - invalid */
2993/* Opcode 0xf2 0x0f 0x54 - invalid */
2994
2995/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2996FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2997/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2998FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2999/* Opcode 0xf3 0x0f 0x55 - invalid */
3000/* Opcode 0xf2 0x0f 0x55 - invalid */
3001
3002/** Opcode 0x0f 0x56 - orps Vps, Wps */
3003FNIEMOP_STUB(iemOp_orps_Vps_Wps);
3004/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3005FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
3006/* Opcode 0xf3 0x0f 0x56 - invalid */
3007/* Opcode 0xf2 0x0f 0x56 - invalid */
3008
3009/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3010FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
3011/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3012FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
3013/* Opcode 0xf3 0x0f 0x57 - invalid */
3014/* Opcode 0xf2 0x0f 0x57 - invalid */
3015
3016/** Opcode 0x0f 0x58 - addps Vps, Wps */
3017FNIEMOP_STUB(iemOp_addps_Vps_Wps);
3018/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3019FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
3020/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3021FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3022/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3023FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3024
3025/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3026FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
3027/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3028FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
3029/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3030FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3031/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3032FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3033
3034/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3035FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3036/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3037FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3038/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3039FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3040/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3041FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3042
3043/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3044FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3045/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3046FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3047/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3048FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3049/* Opcode 0xf2 0x0f 0x5b - invalid */
3050
3051/** Opcode 0x0f 0x5c - subps Vps, Wps */
3052FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3053/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3054FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3055/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3056FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3057/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3058FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3059
3060/** Opcode 0x0f 0x5d - minps Vps, Wps */
3061FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3062/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3063FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3064/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3065FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3066/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3067FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3068
3069/** Opcode 0x0f 0x5e - divps Vps, Wps */
3070FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3071/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3072FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3073/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3074FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3075/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3076FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3077
3078/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3079FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3080/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3081FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3082/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3083FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3084/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3085FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3086
3087/**
3088 * Common worker for SSE2 instructions on the forms:
3089 * pxxxx xmm1, xmm2/mem128
3090 *
3091 * The 2nd operand is the first half of a register, which in the memory case
3092 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3093 * memory access for SSE (this SSE variant serves the 66h prefixed forms).
3094 *
3095 * Exceptions type 4.
3096 */
3097 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3098{
3099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3101 {
3102 /*
3103 * Register, register.
3104 */
3105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3106 IEM_MC_BEGIN(2, 0);
3107 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3108 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3109 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3110 IEM_MC_PREPARE_SSE_USAGE();
3111 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3112 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3113 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3114 IEM_MC_ADVANCE_RIP();
3115 IEM_MC_END();
3116 }
3117 else
3118 {
3119 /*
3120 * Register, memory.
3121 */
3122 IEM_MC_BEGIN(2, 2);
3123 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3124 IEM_MC_LOCAL(uint64_t, uSrc);
3125 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3127
3128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3131 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3132
3133 IEM_MC_PREPARE_SSE_USAGE();
3134 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3135 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3136
3137 IEM_MC_ADVANCE_RIP();
3138 IEM_MC_END();
3139 }
3140 return VINF_SUCCESS;
3141}
3142
3143
3144/**
3145 * Common worker for MMX instructions on the forms:
3146 * pxxxx mm1, mm2/mem32
3147 *
3148 * The 2nd operand is the first half of a register, which in the memory case
3149 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3150 * memory access for SSE (this MMX variant serves the unprefixed forms).
3151 *
3152 * Exceptions type 4.
3153 */
3154 FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3155{
3156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3157 if (!pImpl->pfnU64)
3158 return IEMOP_RAISE_INVALID_OPCODE();
3159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3160 {
3161 /*
3162 * Register, register.
3163 */
3164 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3165 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3167 IEM_MC_BEGIN(2, 0);
3168 IEM_MC_ARG(uint64_t *, pDst, 0);
3169 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3170 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3171 IEM_MC_PREPARE_FPU_USAGE();
3172 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3173 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3174 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3175 IEM_MC_ADVANCE_RIP();
3176 IEM_MC_END();
3177 }
3178 else
3179 {
3180 /*
3181 * Register, memory.
3182 */
3183 IEM_MC_BEGIN(2, 2);
3184 IEM_MC_ARG(uint64_t *, pDst, 0);
3185 IEM_MC_LOCAL(uint32_t, uSrc);
3186 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3188
3189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3191 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3192 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3193
3194 IEM_MC_PREPARE_FPU_USAGE();
3195 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3196 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3197
3198 IEM_MC_ADVANCE_RIP();
3199 IEM_MC_END();
3200 }
3201 return VINF_SUCCESS;
3202}
3203
3204
3205/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3210}
3211
3212 /** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x60 - invalid */
3220
3221
3222/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3223FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3224{
3225 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX in CPUID. */
3226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3227}
3228
3229/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3230FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3231{
3232 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3234}
3235
3236/* Opcode 0xf3 0x0f 0x61 - invalid */
3237
3238
3239/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3240FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3241{
3242 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3243 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3244}
3245
3246/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3247FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3248{
3249 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3250 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3251}
3252
3253/* Opcode 0xf3 0x0f 0x62 - invalid */
3254
3255
3256
3257/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3258FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3259/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3260FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3261/* Opcode 0xf3 0x0f 0x63 - invalid */
3262
3263/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3264FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3265/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3266FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3267/* Opcode 0xf3 0x0f 0x64 - invalid */
3268
3269/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3270FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3271/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3272FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3273/* Opcode 0xf3 0x0f 0x65 - invalid */
3274
3275/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3276FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3277/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3278FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3279/* Opcode 0xf3 0x0f 0x66 - invalid */
3280
3281/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3282FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3283/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3284FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3285/* Opcode 0xf3 0x0f 0x67 - invalid */
3286
3287
3288/**
3289 * Common worker for MMX instructions on the form:
3290 * pxxxx mm1, mm2/mem64
3291 *
3292 * The 2nd operand is the second half of a register, which in the memory case
3293 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3294 * where it may read the full 128 bits or only the upper 64 bits.
3295 *
3296 * Exceptions type 4.
3297 */
3298FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3299{
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
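/* Table entries without an MMX implementation decode as invalid opcode. */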
3301 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3303 {
3304 /*
3305 * Register, register.
3306 */
3307 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3308 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3310 IEM_MC_BEGIN(2, 0);
3311 IEM_MC_ARG(uint64_t *, pDst, 0);
3312 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3313 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3314 IEM_MC_PREPARE_FPU_USAGE();
3315 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3316 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3317 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 else
3322 {
3323 /*
3324 * Register, memory.
3325 */
3326 IEM_MC_BEGIN(2, 2);
3327 IEM_MC_ARG(uint64_t *, pDst, 0);
3328 IEM_MC_LOCAL(uint64_t, uSrc);
3329 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3331
3332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3334 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3335 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3336
3337 IEM_MC_PREPARE_FPU_USAGE();
3338 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3339 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3340
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 return VINF_SUCCESS;
3345}
3346
3347
3348/**
3349 * Common worker for SSE2 instructions on the form:
3350 * pxxxx xmm1, xmm2/mem128
3351 *
3352 * The 2nd operand is the second half of a register, which in the memory case
3353 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3354 * where it may read the full 128 bits or only the upper 64 bits.
3355 *
3356 * Exceptions type 4.
3357 */
3358FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3359{
3360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3361 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3362 {
3363 /*
3364 * Register, register.
3365 */
3366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3367 IEM_MC_BEGIN(2, 0);
3368 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3369 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3370 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3371 IEM_MC_PREPARE_SSE_USAGE();
3372 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3373 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3374 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3375 IEM_MC_ADVANCE_RIP();
3376 IEM_MC_END();
3377 }
3378 else
3379 {
3380 /*
3381 * Register, memory.
3382 */
3383 IEM_MC_BEGIN(2, 2);
3384 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3385 IEM_MC_LOCAL(RTUINT128U, uSrc);
3386 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3388
3389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3391 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3392 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3393
3394 IEM_MC_PREPARE_SSE_USAGE();
3395 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3396 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3397
3398 IEM_MC_ADVANCE_RIP();
3399 IEM_MC_END();
3400 }
3401 return VINF_SUCCESS;
3402}
3403
3404
3405/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3406FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3407{
3408 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3409 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3410}
3411
3412/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3413FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3414{
3415 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3416 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3417}
3418/* Opcode 0xf3 0x0f 0x68 - invalid */
3419
3420
3421/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3422FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3423{
3424 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3426}
3427
3428 /** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3429FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3430{
3431 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3432 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3433 }
3434
3435/* Opcode 0xf3 0x0f 0x69 - invalid */
3436
3437
3438/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3439FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3440{
3441 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3442 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3443}
3444
3445/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3446FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3447{
3448 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3449 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3450}
3451/* Opcode 0xf3 0x0f 0x6a - invalid */
3452
3453
3454/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3455FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3456/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3457FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3458/* Opcode 0xf3 0x0f 0x6b - invalid */
3459
3460
3461/* Opcode 0x0f 0x6c - invalid */
3462
3463/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3464FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3465{
3466 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3467 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3468}
3469
3470/* Opcode 0xf3 0x0f 0x6c - invalid */
3471/* Opcode 0xf2 0x0f 0x6c - invalid */
3472
3473
3474/* Opcode 0x0f 0x6d - invalid */
3475
3476/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3477FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3478{
3479 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3480 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3481}
3482
3483/* Opcode 0xf3 0x0f 0x6d - invalid */
3484
3485
3486FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3487{
3488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
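/* REX.W selects the form: W=1 gives the 64-bit MOVQ Pq,Eq, W=0 the 32-bit MOVD Pd,Ed. */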
3489 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3490 {
3491 /**
3492 * @opcode 0x6e
3493 * @opcodesub rex.w=1
3494 * @oppfx none
3495 * @opcpuid mmx
3496 * @opgroup og_mmx_datamove
3497 * @opxcpttype 5
3498 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3499 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3500 */
3501 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3503 {
3504 /* MMX, greg64 */
3505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3506 IEM_MC_BEGIN(0, 1);
3507 IEM_MC_LOCAL(uint64_t, u64Tmp);
3508
3509 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3510 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3511
3512 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3513 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3514 IEM_MC_FPU_TO_MMX_MODE();
3515
3516 IEM_MC_ADVANCE_RIP();
3517 IEM_MC_END();
3518 }
3519 else
3520 {
3521 /* MMX, [mem64] */
3522 IEM_MC_BEGIN(0, 2);
3523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3524 IEM_MC_LOCAL(uint64_t, u64Tmp);
3525
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3528 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3529 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3530
3531 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3532 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3533 IEM_MC_FPU_TO_MMX_MODE();
3534
3535 IEM_MC_ADVANCE_RIP();
3536 IEM_MC_END();
3537 }
3538 }
3539 else
3540 {
3541 /**
3542 * @opdone
3543 * @opcode 0x6e
3544 * @opcodesub rex.w=0
3545 * @oppfx none
3546 * @opcpuid mmx
3547 * @opgroup og_mmx_datamove
3548 * @opxcpttype 5
3549 * @opfunction iemOp_movd_q_Pd_Ey
3550 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3551 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3552 */
3553 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3555 {
3556 /* MMX, greg */
3557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3558 IEM_MC_BEGIN(0, 1);
3559 IEM_MC_LOCAL(uint64_t, u64Tmp);
3560
3561 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3562 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3563
3564 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3565 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3566 IEM_MC_FPU_TO_MMX_MODE();
3567
3568 IEM_MC_ADVANCE_RIP();
3569 IEM_MC_END();
3570 }
3571 else
3572 {
3573 /* MMX, [mem] */
3574 IEM_MC_BEGIN(0, 2);
3575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3576 IEM_MC_LOCAL(uint32_t, u32Tmp);
3577
3578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3581 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3582
3583 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3584 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3585 IEM_MC_FPU_TO_MMX_MODE();
3586
3587 IEM_MC_ADVANCE_RIP();
3588 IEM_MC_END();
3589 }
3590 }
3591 return VINF_SUCCESS;
3592}
3593
3594FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3595{
3596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
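/* As with the MMX form above: REX.W=1 gives MOVQ VqZx,Eq, REX.W=0 gives MOVD VdZx,Ed. */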
3597 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3598 {
3599 /**
3600 * @opcode 0x6e
3601 * @opcodesub rex.w=1
3602 * @oppfx 0x66
3603 * @opcpuid sse2
3604 * @opgroup og_sse2_simdint_datamove
3605 * @opxcpttype 5
3606 * @optest 64-bit / op1=1 op2=2 -> op1=2
3607 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3608 */
3609 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3611 {
3612 /* XMM, greg64 */
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614 IEM_MC_BEGIN(0, 1);
3615 IEM_MC_LOCAL(uint64_t, u64Tmp);
3616
3617 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3618 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3619
3620 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3621 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3622
3623 IEM_MC_ADVANCE_RIP();
3624 IEM_MC_END();
3625 }
3626 else
3627 {
3628 /* XMM, [mem64] */
3629 IEM_MC_BEGIN(0, 2);
3630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3631 IEM_MC_LOCAL(uint64_t, u64Tmp);
3632
3633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3637
3638 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3639 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3640
3641 IEM_MC_ADVANCE_RIP();
3642 IEM_MC_END();
3643 }
3644 }
3645 else
3646 {
3647 /**
3648 * @opdone
3649 * @opcode 0x6e
3650 * @opcodesub rex.w=0
3651 * @oppfx 0x66
3652 * @opcpuid sse2
3653 * @opgroup og_sse2_simdint_datamove
3654 * @opxcpttype 5
3655 * @opfunction iemOp_movd_q_Vy_Ey
3656 * @optest op1=1 op2=2 -> op1=2
3657 * @optest op1=0 op2=-42 -> op1=-42
3658 */
3659 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3660 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3661 {
3662 /* XMM, greg32 */
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3664 IEM_MC_BEGIN(0, 1);
3665 IEM_MC_LOCAL(uint32_t, u32Tmp);
3666
3667 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3669
3670 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3671 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3672
3673 IEM_MC_ADVANCE_RIP();
3674 IEM_MC_END();
3675 }
3676 else
3677 {
3678 /* XMM, [mem32] */
3679 IEM_MC_BEGIN(0, 2);
3680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3681 IEM_MC_LOCAL(uint32_t, u32Tmp);
3682
3683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3685 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3686 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3687
3688 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3689 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3690
3691 IEM_MC_ADVANCE_RIP();
3692 IEM_MC_END();
3693 }
3694 }
3695 return VINF_SUCCESS;
3696}
3697
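/*
 * Illustrative sketch, not part of the decoder: every ModR/M test in this
 * file follows the same pattern.  bRm packs mod(7:6), reg(5:3) and rm(2:0);
 * mod == 3 selects the register form, anything else a memory operand.  The
 * helper name below is made up for this example.
 */
DECLINLINE(bool) iemExampleModRmIsRegForm(uint8_t bRm)
{
    /* All mod bits set means register direct, as tested throughout above. */
    return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
}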
3698/* Opcode 0xf3 0x0f 0x6e - invalid */
3699
3700
3701/**
3702 * @opcode 0x6f
3703 * @oppfx none
3704 * @opcpuid mmx
3705 * @opgroup og_mmx_datamove
3706 * @opxcpttype 5
3707 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3708 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3709 */
3710FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3711{
3712 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3715 {
3716 /*
3717 * Register, register.
3718 */
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_BEGIN(0, 1);
3721 IEM_MC_LOCAL(uint64_t, u64Tmp);
3722
3723 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3724 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3725
3726 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3727 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3728 IEM_MC_FPU_TO_MMX_MODE();
3729
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 }
3733 else
3734 {
3735 /*
3736 * Register, memory.
3737 */
3738 IEM_MC_BEGIN(0, 2);
3739 IEM_MC_LOCAL(uint64_t, u64Tmp);
3740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3741
3742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3744 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3745 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3746
3747 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3748 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3749 IEM_MC_FPU_TO_MMX_MODE();
3750
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 return VINF_SUCCESS;
3755}
3756
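/*
 * Note on IEM_MC_FPU_TO_MMX_MODE, illustrative values only: executing an MMX
 * instruction puts the x87 unit into MMX mode, i.e. FSW.TOP is cleared to 0
 * and the abridged tag word reads as all-valid (0xff), which is why the
 * @optest lines above expect ftw=0xff after the move.
 */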
3757/**
3758 * @opcode 0x6f
3759 * @oppfx 0x66
3760 * @opcpuid sse2
3761 * @opgroup og_sse2_simdint_datamove
3762 * @opxcpttype 1
3763 * @optest op1=1 op2=2 -> op1=2
3764 * @optest op1=0 op2=-42 -> op1=-42
3765 */
3766FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3767{
3768 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3771 {
3772 /*
3773 * Register, register.
3774 */
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776 IEM_MC_BEGIN(0, 0);
3777
3778 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3780
3781 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3782 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3783 IEM_MC_ADVANCE_RIP();
3784 IEM_MC_END();
3785 }
3786 else
3787 {
3788 /*
3789 * Register, memory.
3790 */
3791 IEM_MC_BEGIN(0, 2);
3792 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3794
3795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3797 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3798 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3799
3800 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3801 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3802
3803 IEM_MC_ADVANCE_RIP();
3804 IEM_MC_END();
3805 }
3806 return VINF_SUCCESS;
3807}
3808
3809/**
3810 * @opcode 0x6f
3811 * @oppfx 0xf3
3812 * @opcpuid sse2
3813 * @opgroup og_sse2_simdint_datamove
3814 * @opxcpttype 4UA
3815 * @optest op1=1 op2=2 -> op1=2
3816 * @optest op1=0 op2=-42 -> op1=-42
3817 */
3818FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3819{
3820 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3822 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3823 {
3824 /*
3825 * Register, register.
3826 */
3827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3828 IEM_MC_BEGIN(0, 0);
3829 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3830 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3831 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3832 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3833 IEM_MC_ADVANCE_RIP();
3834 IEM_MC_END();
3835 }
3836 else
3837 {
3838 /*
3839 * Register, memory.
3840 */
3841 IEM_MC_BEGIN(0, 2);
3842 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3844
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3848 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3849 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3850 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3851
3852 IEM_MC_ADVANCE_RIP();
3853 IEM_MC_END();
3854 }
3855 return VINF_SUCCESS;
3856}
3857
3858
3859/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3860FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3861{
3862 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3865 {
3866 /*
3867 * Register, register.
3868 */
3869 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3871
3872 IEM_MC_BEGIN(3, 0);
3873 IEM_MC_ARG(uint64_t *, pDst, 0);
3874 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3875 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3876 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3877 IEM_MC_PREPARE_FPU_USAGE();
3878 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3879 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3880 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3881 IEM_MC_ADVANCE_RIP();
3882 IEM_MC_END();
3883 }
3884 else
3885 {
3886 /*
3887 * Register, memory.
3888 */
3889 IEM_MC_BEGIN(3, 2);
3890 IEM_MC_ARG(uint64_t *, pDst, 0);
3891 IEM_MC_LOCAL(uint64_t, uSrc);
3892 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3894
3895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3896 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3897 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3899 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3900
3901 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3902 IEM_MC_PREPARE_FPU_USAGE();
3903 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3904 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3905
3906 IEM_MC_ADVANCE_RIP();
3907 IEM_MC_END();
3908 }
3909 return VINF_SUCCESS;
3910}
3911
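/*
 * Decoding note, illustrative example only: in the memory forms of the
 * pshufX instructions the imm8 (bEvil) is fetched after
 * IEM_MC_CALC_RM_EFF_ADDR because the ModR/M displacement bytes precede the
 * immediate in the instruction stream; e.g. 0F 70 44 24 08 55 decodes as
 * pshufw mm0, [esp+8], 0x55 with the 0x55 following the disp8 0x08.
 */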
3912/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3913FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3914{
3915 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3918 {
3919 /*
3920 * Register, register.
3921 */
3922 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3924
3925 IEM_MC_BEGIN(3, 0);
3926 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3927 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3928 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3929 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3930 IEM_MC_PREPARE_SSE_USAGE();
3931 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3932 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3933 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3934 IEM_MC_ADVANCE_RIP();
3935 IEM_MC_END();
3936 }
3937 else
3938 {
3939 /*
3940 * Register, memory.
3941 */
3942 IEM_MC_BEGIN(3, 2);
3943 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3944 IEM_MC_LOCAL(RTUINT128U, uSrc);
3945 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3947
3948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3949 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3950 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3952 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3953
3954 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3955 IEM_MC_PREPARE_SSE_USAGE();
3956 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3957 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3958
3959 IEM_MC_ADVANCE_RIP();
3960 IEM_MC_END();
3961 }
3962 return VINF_SUCCESS;
3963}
3964
3965/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3966FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3967{
3968 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3971 {
3972 /*
3973 * Register, register.
3974 */
3975 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977
3978 IEM_MC_BEGIN(3, 0);
3979 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3980 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3981 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3982 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3983 IEM_MC_PREPARE_SSE_USAGE();
3984 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3985 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3986 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3987 IEM_MC_ADVANCE_RIP();
3988 IEM_MC_END();
3989 }
3990 else
3991 {
3992 /*
3993 * Register, memory.
3994 */
3995 IEM_MC_BEGIN(3, 2);
3996 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3997 IEM_MC_LOCAL(RTUINT128U, uSrc);
3998 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4000
4001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4002 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4003 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4006
4007 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4008 IEM_MC_PREPARE_SSE_USAGE();
4009 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4010 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
4011
4012 IEM_MC_ADVANCE_RIP();
4013 IEM_MC_END();
4014 }
4015 return VINF_SUCCESS;
4016}
4017
4018/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4019FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4020{
4021 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
4022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4024 {
4025 /*
4026 * Register, register.
4027 */
4028 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4030
4031 IEM_MC_BEGIN(3, 0);
4032 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4033 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4034 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4035 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4036 IEM_MC_PREPARE_SSE_USAGE();
4037 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4038 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4039 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4040 IEM_MC_ADVANCE_RIP();
4041 IEM_MC_END();
4042 }
4043 else
4044 {
4045 /*
4046 * Register, memory.
4047 */
4048 IEM_MC_BEGIN(3, 2);
4049 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4050 IEM_MC_LOCAL(RTUINT128U, uSrc);
4051 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4053
4054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4055 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4056 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4059
4060 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4061 IEM_MC_PREPARE_SSE_USAGE();
4062 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4063 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4064
4065 IEM_MC_ADVANCE_RIP();
4066 IEM_MC_END();
4067 }
4068 return VINF_SUCCESS;
4069}
4070
4071
4072/** Opcode 0x0f 0x71 11/2. */
4073FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4074
4075/** Opcode 0x66 0x0f 0x71 11/2. */
4076FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4077
4078/** Opcode 0x0f 0x71 11/4. */
4079FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4080
4081/** Opcode 0x66 0x0f 0x71 11/4. */
4082FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4083
4084/** Opcode 0x0f 0x71 11/6. */
4085FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4086
4087/** Opcode 0x66 0x0f 0x71 11/6. */
4088FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4089
4090
4091/**
4092 * Group 12 jump table for register variant.
4093 */
4094IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4095{
4096 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4097 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4098 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4099 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4100 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4101 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4102 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4103 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4104};
4105AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4106
4107
4108/** Opcode 0x0f 0x71. */
4109FNIEMOP_DEF(iemOp_Grp12)
4110{
4111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4113 /* register, register */
4114 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4115 + pVCpu->iem.s.idxPrefix], bRm);
4116 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4117}
4118
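/*
 * Illustrative sketch, not used by the decoder: the Grp12/13/14 tables have
 * four prefix columns (none, 0x66, 0xf3, 0xf2) per /r row, so the register
 * form dispatches on reg * 4 + idxPrefix.  Worked example for 66 0F 71 /2 ib
 * (psrlw xmm, imm8): reg = 2, idxPrefix = 1, index = 2*4 + 1 = 9, which is
 * iemOp_Grp12_psrlw_Ux_Ib in g_apfnGroup12RegReg.  The helper name below is
 * made up for this example.
 */
DECLINLINE(unsigned) iemExampleGrpRegRegIndex(uint8_t bRm, uint8_t idxPrefix)
{
    /* Same arithmetic as the Grp12/13/14 dispatchers. */
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}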
4119
4120/** Opcode 0x0f 0x72 11/2. */
4121FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4122
4123/** Opcode 0x66 0x0f 0x72 11/2. */
4124FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4125
4126/** Opcode 0x0f 0x72 11/4. */
4127FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4128
4129/** Opcode 0x66 0x0f 0x72 11/4. */
4130FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4131
4132/** Opcode 0x0f 0x72 11/6. */
4133FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4134
4135/** Opcode 0x66 0x0f 0x72 11/6. */
4136FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4137
4138
4139/**
4140 * Group 13 jump table for register variant.
4141 */
4142IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4143{
4144 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4145 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4146 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4147 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4148 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4149 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4150 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4151 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4152};
4153AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4154
4155/** Opcode 0x0f 0x72. */
4156FNIEMOP_DEF(iemOp_Grp13)
4157{
4158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4160 /* register, register */
4161 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4162 + pVCpu->iem.s.idxPrefix], bRm);
4163 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4164}
4165
4166
4167/** Opcode 0x0f 0x73 11/2. */
4168FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4169
4170/** Opcode 0x66 0x0f 0x73 11/2. */
4171FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4172
4173/** Opcode 0x66 0x0f 0x73 11/3. */
4174FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4175
4176/** Opcode 0x0f 0x73 11/6. */
4177FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4178
4179/** Opcode 0x66 0x0f 0x73 11/6. */
4180FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4181
4182/** Opcode 0x66 0x0f 0x73 11/7. */
4183FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4184
4185/**
4186 * Group 14 jump table for register variant.
4187 */
4188IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4189{
4190 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4191 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4192 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4193 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4194 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4195 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4196 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4197 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4198};
4199AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4200
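/*
 * Note, illustrative only: unlike Grp12/13, the /3 (psrldq) and /7 (pslldq)
 * rows above only have a 0x66 column because those byte-granular shifts
 * exist for XMM registers only; there is no MMX form, so the unprefixed
 * slots decode as invalid.
 */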
4201
4202/** Opcode 0x0f 0x73. */
4203FNIEMOP_DEF(iemOp_Grp14)
4204{
4205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4207 /* register, register */
4208 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4209 + pVCpu->iem.s.idxPrefix], bRm);
4210 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4211}
4212
4213
4214/**
4215 * Common worker for MMX instructions on the form:
4216 * pxxx mm1, mm2/mem64
4217 */
4218FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4219{
4220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4222 {
4223 /*
4224 * Register, register.
4225 */
4226 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4227 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4229 IEM_MC_BEGIN(2, 0);
4230 IEM_MC_ARG(uint64_t *, pDst, 0);
4231 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4232 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4233 IEM_MC_PREPARE_FPU_USAGE();
4234 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4235 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4236 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4237 IEM_MC_ADVANCE_RIP();
4238 IEM_MC_END();
4239 }
4240 else
4241 {
4242 /*
4243 * Register, memory.
4244 */
4245 IEM_MC_BEGIN(2, 2);
4246 IEM_MC_ARG(uint64_t *, pDst, 0);
4247 IEM_MC_LOCAL(uint64_t, uSrc);
4248 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4250
4251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4253 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4254 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4255
4256 IEM_MC_PREPARE_FPU_USAGE();
4257 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4258 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4259
4260 IEM_MC_ADVANCE_RIP();
4261 IEM_MC_END();
4262 }
4263 return VINF_SUCCESS;
4264}
4265
4266
4267/**
4268 * Common worker for SSE2 instructions on the forms:
4269 * pxxx xmm1, xmm2/mem128
4270 *
4271 * Proper alignment of the 128-bit operand is enforced.
4272 * Exceptions type 4. SSE2 cpuid checks.
4273 */
4274FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4275{
4276 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4277 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4278 {
4279 /*
4280 * Register, register.
4281 */
4282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4283 IEM_MC_BEGIN(2, 0);
4284 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4285 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4286 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4287 IEM_MC_PREPARE_SSE_USAGE();
4288 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4289 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4290 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4291 IEM_MC_ADVANCE_RIP();
4292 IEM_MC_END();
4293 }
4294 else
4295 {
4296 /*
4297 * Register, memory.
4298 */
4299 IEM_MC_BEGIN(2, 2);
4300 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4301 IEM_MC_LOCAL(RTUINT128U, uSrc);
4302 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4304
4305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4307 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4308 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4309
4310 IEM_MC_PREPARE_SSE_USAGE();
4311 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4312 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4313
4314 IEM_MC_ADVANCE_RIP();
4315 IEM_MC_END();
4316 }
4317 return VINF_SUCCESS;
4318}
4319
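/*
 * Usage sketch, illustrative only: the two workers above take a
 * PCIEMOPMEDIAF2 so a single implementation table can serve both register
 * widths; the MMX entry point calls pImpl->pfnU64 and the SSE2 one
 * pImpl->pfnU128, and the pcmpeqX wrappers below simply forward a
 * g_iemAImpl_xxx table to the appropriate worker.
 */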
4320
4321/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4322FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4323{
4324 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4325 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4326}
4327
4328/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4329FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4330{
4331 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4332 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4333}
4334
4335/* Opcode 0xf3 0x0f 0x74 - invalid */
4336/* Opcode 0xf2 0x0f 0x74 - invalid */
4337
4338
4339/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4340FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4341{
4342 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4343 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4344}
4345
4346/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4347FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4348{
4349 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4350 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4351}
4352
4353/* Opcode 0xf3 0x0f 0x75 - invalid */
4354/* Opcode 0xf2 0x0f 0x75 - invalid */
4355
4356
4357/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4358FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4359{
4360 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4361 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4362}
4363
4364/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4365FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4366{
4367 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4368 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4369}
4370
4371/* Opcode 0xf3 0x0f 0x76 - invalid */
4372/* Opcode 0xf2 0x0f 0x76 - invalid */
4373
4374
4375/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4376FNIEMOP_DEF(iemOp_emms)
4377{
4378 IEMOP_MNEMONIC(emms, "emms");
4379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4380
4381 IEM_MC_BEGIN(0,0);
4382 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4383 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4384 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4385 IEM_MC_FPU_FROM_MMX_MODE();
4386 IEM_MC_ADVANCE_RIP();
4387 IEM_MC_END();
4388 return VINF_SUCCESS;
4389}
4390
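/*
 * Note, illustrative only: IEM_MC_FPU_FROM_MMX_MODE above is the inverse of
 * IEM_MC_FPU_TO_MMX_MODE; emms marks every x87 register tag empty again
 * (abridged tag word 0x00) so subsequent FPU code starts with a clean
 * register stack.
 */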
4391/* Opcode 0x66 0x0f 0x77 - invalid */
4392/* Opcode 0xf3 0x0f 0x77 - invalid */
4393/* Opcode 0xf2 0x0f 0x77 - invalid */
4394
4395/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4396FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4397/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4398FNIEMOP_STUB(iemOp_AmdGrp17);
4399/* Opcode 0xf3 0x0f 0x78 - invalid */
4400/* Opcode 0xf2 0x0f 0x78 - invalid */
4401
4402/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4403FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4404/* Opcode 0x66 0x0f 0x79 - invalid */
4405/* Opcode 0xf3 0x0f 0x79 - invalid */
4406/* Opcode 0xf2 0x0f 0x79 - invalid */
4407
4408/* Opcode 0x0f 0x7a - invalid */
4409/* Opcode 0x66 0x0f 0x7a - invalid */
4410/* Opcode 0xf3 0x0f 0x7a - invalid */
4411/* Opcode 0xf2 0x0f 0x7a - invalid */
4412
4413/* Opcode 0x0f 0x7b - invalid */
4414/* Opcode 0x66 0x0f 0x7b - invalid */
4415/* Opcode 0xf3 0x0f 0x7b - invalid */
4416/* Opcode 0xf2 0x0f 0x7b - invalid */
4417
4418/* Opcode 0x0f 0x7c - invalid */
4419/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4420FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4421/* Opcode 0xf3 0x0f 0x7c - invalid */
4422/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4423FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4424
4425/* Opcode 0x0f 0x7d - invalid */
4426/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4427FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4428/* Opcode 0xf3 0x0f 0x7d - invalid */
4429/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4430FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4431
4432
4433/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4434FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4435{
4436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4437 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4438 {
4439 /**
4440 * @opcode 0x7e
4441 * @opcodesub rex.w=1
4442 * @oppfx none
4443 * @opcpuid mmx
4444 * @opgroup og_mmx_datamove
4445 * @opxcpttype 5
4446 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4447 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4448 */
4449 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4450 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4451 {
4452 /* greg64, MMX */
4453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4454 IEM_MC_BEGIN(0, 1);
4455 IEM_MC_LOCAL(uint64_t, u64Tmp);
4456
4457 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4458 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4459
4460 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4461 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4462 IEM_MC_FPU_TO_MMX_MODE();
4463
4464 IEM_MC_ADVANCE_RIP();
4465 IEM_MC_END();
4466 }
4467 else
4468 {
4469 /* [mem64], MMX */
4470 IEM_MC_BEGIN(0, 2);
4471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4472 IEM_MC_LOCAL(uint64_t, u64Tmp);
4473
4474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4476 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4477 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4478
4479 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4480 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4481 IEM_MC_FPU_TO_MMX_MODE();
4482
4483 IEM_MC_ADVANCE_RIP();
4484 IEM_MC_END();
4485 }
4486 }
4487 else
4488 {
4489 /**
4490 * @opdone
4491 * @opcode 0x7e
4492 * @opcodesub rex.w=0
4493 * @oppfx none
4494 * @opcpuid mmx
4495 * @opgroup og_mmx_datamove
4496 * @opxcpttype 5
4497 * @opfunction iemOp_movd_q_Ey_Pd
4498 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4499 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4500 */
4501 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4503 {
4504 /* greg32, MMX */
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4506 IEM_MC_BEGIN(0, 1);
4507 IEM_MC_LOCAL(uint32_t, u32Tmp);
4508
4509 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4510 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4511
4512 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4513 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4514 IEM_MC_FPU_TO_MMX_MODE();
4515
4516 IEM_MC_ADVANCE_RIP();
4517 IEM_MC_END();
4518 }
4519 else
4520 {
4521 /* [mem32], MMX */
4522 IEM_MC_BEGIN(0, 2);
4523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4524 IEM_MC_LOCAL(uint32_t, u32Tmp);
4525
4526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4528 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4529 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4530
4531 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4532 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4533 IEM_MC_FPU_TO_MMX_MODE();
4534
4535 IEM_MC_ADVANCE_RIP();
4536 IEM_MC_END();
4537 }
4538 }
4539 return VINF_SUCCESS;
4540
4541}
4542
4543
4544FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4545{
4546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4547 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4548 {
4549 /**
4550 * @opcode 0x7e
4551 * @opcodesub rex.w=1
4552 * @oppfx 0x66
4553 * @opcpuid sse2
4554 * @opgroup og_sse2_simdint_datamove
4555 * @opxcpttype 5
4556 * @optest 64-bit / op1=1 op2=2 -> op1=2
4557 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4558 */
4559 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4561 {
4562 /* greg64, XMM */
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(uint64_t, u64Tmp);
4566
4567 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4568 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4569
4570 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4571 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4572
4573 IEM_MC_ADVANCE_RIP();
4574 IEM_MC_END();
4575 }
4576 else
4577 {
4578 /* [mem64], XMM */
4579 IEM_MC_BEGIN(0, 2);
4580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4581 IEM_MC_LOCAL(uint64_t, u64Tmp);
4582
4583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4587
4588 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4589 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4590
4591 IEM_MC_ADVANCE_RIP();
4592 IEM_MC_END();
4593 }
4594 }
4595 else
4596 {
4597 /**
4598 * @opdone
4599 * @opcode 0x7e
4600 * @opcodesub rex.w=0
4601 * @oppfx 0x66
4602 * @opcpuid sse2
4603 * @opgroup og_sse2_simdint_datamove
4604 * @opxcpttype 5
4605 * @opfunction iemOp_movd_q_Ey_Vy
4606 * @optest op1=1 op2=2 -> op1=2
4607 * @optest op1=0 op2=-42 -> op1=-42
4608 */
4609 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4611 {
4612 /* greg32, XMM */
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_BEGIN(0, 1);
4615 IEM_MC_LOCAL(uint32_t, u32Tmp);
4616
4617 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4618 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4619
4620 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4621 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4622
4623 IEM_MC_ADVANCE_RIP();
4624 IEM_MC_END();
4625 }
4626 else
4627 {
4628 /* [mem32], XMM */
4629 IEM_MC_BEGIN(0, 2);
4630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4631 IEM_MC_LOCAL(uint32_t, u32Tmp);
4632
4633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4637
4638 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4639 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4640
4641 IEM_MC_ADVANCE_RIP();
4642 IEM_MC_END();
4643 }
4644 }
4645 return VINF_SUCCESS;
4646
4647}
4648
4649/**
4650 * @opcode 0x7e
4651 * @oppfx 0xf3
4652 * @opcpuid sse2
4653 * @opgroup og_sse2_pcksclr_datamove
4654 * @opxcpttype none
4655 * @optest op1=1 op2=2 -> op1=2
4656 * @optest op1=0 op2=-42 -> op1=-42
4657 */
4658FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4659{
4660 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4662 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4663 {
4664 /*
4665 * Register, register.
4666 */
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4668 IEM_MC_BEGIN(0, 2);
4669 IEM_MC_LOCAL(uint64_t, uSrc);
4670
4671 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4672 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4673
4674 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4675 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4676
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 /*
4683 * Memory, register.
4684 */
4685 IEM_MC_BEGIN(0, 2);
4686 IEM_MC_LOCAL(uint64_t, uSrc);
4687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4688
4689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4692 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4693
4694 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4695 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4696
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 return VINF_SUCCESS;
4701}
4702
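/*
 * Note, illustrative values only: IEM_MC_STORE_XREG_U64_ZX_U128 above gives
 * movq its zero-extending behaviour; loading 0x1122334455667788 into xmm1
 * leaves xmm1 = 0x00000000'00000000'11223344'55667788, i.e. bits 127:64 are
 * cleared in both the register and the memory source forms.
 */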
4703/* Opcode 0xf2 0x0f 0x7e - invalid */
4704
4705
4706/** Opcode 0x0f 0x7f - movq Qq, Pq */
4707FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4708{
4709 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4711 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4712 {
4713 /*
4714 * Register, register.
4715 */
4716 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4717 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719 IEM_MC_BEGIN(0, 1);
4720 IEM_MC_LOCAL(uint64_t, u64Tmp);
4721 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4722 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4723 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4724 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
IEM_MC_FPU_TO_MMX_MODE(); /* movq Qq,Pq is an MMX instruction: enter MMX mode like the 0x6f/0x7e forms above. */
4725 IEM_MC_ADVANCE_RIP();
4726 IEM_MC_END();
4727 }
4728 else
4729 {
4730 /*
4731 * Register, memory.
4732 */
4733 IEM_MC_BEGIN(0, 2);
4734 IEM_MC_LOCAL(uint64_t, u64Tmp);
4735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4736
4737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4739 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4740 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4741
4742 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
IEM_MC_FPU_TO_MMX_MODE(); /* Enter MMX mode on stores too, matching the 0x7e memory form above; the FSW/FTW update is why this is a change, not a pure read, of the FPU state. */
4744
4745 IEM_MC_ADVANCE_RIP();
4746 IEM_MC_END();
4747 }
4748 return VINF_SUCCESS;
4749}
4750
4751/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4752FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4753{
4754 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4757 {
4758 /*
4759 * Register, register.
4760 */
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_BEGIN(0, 0);
4763 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4764 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4765 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4766 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4767 IEM_MC_ADVANCE_RIP();
4768 IEM_MC_END();
4769 }
4770 else
4771 {
4772 /*
4773 * Register, memory.
4774 */
4775 IEM_MC_BEGIN(0, 2);
4776 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4778
4779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4781 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4782 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4783
4784 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4785 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4786
4787 IEM_MC_ADVANCE_RIP();
4788 IEM_MC_END();
4789 }
4790 return VINF_SUCCESS;
4791}
4792
4793/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4794FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4795{
4796 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4799 {
4800 /*
4801 * Register, register.
4802 */
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4806 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4807 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4808 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4809 IEM_MC_ADVANCE_RIP();
4810 IEM_MC_END();
4811 }
4812 else
4813 {
4814 /*
4815 * Register, memory.
4816 */
4817 IEM_MC_BEGIN(0, 2);
4818 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4820
4821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4824 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4825
4826 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4827 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4828
4829 IEM_MC_ADVANCE_RIP();
4830 IEM_MC_END();
4831 }
4832 return VINF_SUCCESS;
4833}
4834
4835/* Opcode 0xf2 0x0f 0x7f - invalid */
4836
4837
4838
4839/** Opcode 0x0f 0x80. */
4840FNIEMOP_DEF(iemOp_jo_Jv)
4841{
4842 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4843 IEMOP_HLP_MIN_386();
4844 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4845 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4846 {
4847 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849
4850 IEM_MC_BEGIN(0, 0);
4851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4852 IEM_MC_REL_JMP_S16(i16Imm);
4853 } IEM_MC_ELSE() {
4854 IEM_MC_ADVANCE_RIP();
4855 } IEM_MC_ENDIF();
4856 IEM_MC_END();
4857 }
4858 else
4859 {
4860 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4862
4863 IEM_MC_BEGIN(0, 0);
4864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4865 IEM_MC_REL_JMP_S32(i32Imm);
4866 } IEM_MC_ELSE() {
4867 IEM_MC_ADVANCE_RIP();
4868 } IEM_MC_ENDIF();
4869 IEM_MC_END();
4870 }
4871 return VINF_SUCCESS;
4872}
4873
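/*
 * Note on the Jcc forms, illustrative example only: all the 0x0f 0x8x long
 * jumps below follow the pattern above; the displacement is Jz (16- or
 * 32-bit, never 64-bit) and is applied relative to the end of the
 * instruction.  A 32-bit jo at RIP 0x1000 with imm32 0x10 is 6 bytes long,
 * so the taken branch continues at 0x1000 + 6 + 0x10 = 0x1016.
 */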
4874
4875/** Opcode 0x0f 0x81. */
4876FNIEMOP_DEF(iemOp_jno_Jv)
4877{
4878 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4879 IEMOP_HLP_MIN_386();
4880 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4881 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4882 {
4883 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4885
4886 IEM_MC_BEGIN(0, 0);
4887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4888 IEM_MC_ADVANCE_RIP();
4889 } IEM_MC_ELSE() {
4890 IEM_MC_REL_JMP_S16(i16Imm);
4891 } IEM_MC_ENDIF();
4892 IEM_MC_END();
4893 }
4894 else
4895 {
4896 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4898
4899 IEM_MC_BEGIN(0, 0);
4900 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4901 IEM_MC_ADVANCE_RIP();
4902 } IEM_MC_ELSE() {
4903 IEM_MC_REL_JMP_S32(i32Imm);
4904 } IEM_MC_ENDIF();
4905 IEM_MC_END();
4906 }
4907 return VINF_SUCCESS;
4908}
4909
4910
4911/** Opcode 0x0f 0x82. */
4912FNIEMOP_DEF(iemOp_jc_Jv)
4913{
4914 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4915 IEMOP_HLP_MIN_386();
4916 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4917 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4918 {
4919 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4921
4922 IEM_MC_BEGIN(0, 0);
4923 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4924 IEM_MC_REL_JMP_S16(i16Imm);
4925 } IEM_MC_ELSE() {
4926 IEM_MC_ADVANCE_RIP();
4927 } IEM_MC_ENDIF();
4928 IEM_MC_END();
4929 }
4930 else
4931 {
4932 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4934
4935 IEM_MC_BEGIN(0, 0);
4936 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4937 IEM_MC_REL_JMP_S32(i32Imm);
4938 } IEM_MC_ELSE() {
4939 IEM_MC_ADVANCE_RIP();
4940 } IEM_MC_ENDIF();
4941 IEM_MC_END();
4942 }
4943 return VINF_SUCCESS;
4944}
4945
4946
4947/** Opcode 0x0f 0x83. */
4948FNIEMOP_DEF(iemOp_jnc_Jv)
4949{
4950 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4951 IEMOP_HLP_MIN_386();
4952 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4953 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4954 {
4955 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4957
4958 IEM_MC_BEGIN(0, 0);
4959 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4960 IEM_MC_ADVANCE_RIP();
4961 } IEM_MC_ELSE() {
4962 IEM_MC_REL_JMP_S16(i16Imm);
4963 } IEM_MC_ENDIF();
4964 IEM_MC_END();
4965 }
4966 else
4967 {
4968 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4970
4971 IEM_MC_BEGIN(0, 0);
4972 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4973 IEM_MC_ADVANCE_RIP();
4974 } IEM_MC_ELSE() {
4975 IEM_MC_REL_JMP_S32(i32Imm);
4976 } IEM_MC_ENDIF();
4977 IEM_MC_END();
4978 }
4979 return VINF_SUCCESS;
4980}
4981
4982
4983/** Opcode 0x0f 0x84. */
4984FNIEMOP_DEF(iemOp_je_Jv)
4985{
4986 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4987 IEMOP_HLP_MIN_386();
4988 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4989 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4990 {
4991 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993
4994 IEM_MC_BEGIN(0, 0);
4995 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4996 IEM_MC_REL_JMP_S16(i16Imm);
4997 } IEM_MC_ELSE() {
4998 IEM_MC_ADVANCE_RIP();
4999 } IEM_MC_ENDIF();
5000 IEM_MC_END();
5001 }
5002 else
5003 {
5004 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5006
5007 IEM_MC_BEGIN(0, 0);
5008 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5009 IEM_MC_REL_JMP_S32(i32Imm);
5010 } IEM_MC_ELSE() {
5011 IEM_MC_ADVANCE_RIP();
5012 } IEM_MC_ENDIF();
5013 IEM_MC_END();
5014 }
5015 return VINF_SUCCESS;
5016}
5017
5018
5019/** Opcode 0x0f 0x85. */
5020FNIEMOP_DEF(iemOp_jne_Jv)
5021{
5022 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5023 IEMOP_HLP_MIN_386();
5024 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5025 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5026 {
5027 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5029
5030 IEM_MC_BEGIN(0, 0);
5031 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5032 IEM_MC_ADVANCE_RIP();
5033 } IEM_MC_ELSE() {
5034 IEM_MC_REL_JMP_S16(i16Imm);
5035 } IEM_MC_ENDIF();
5036 IEM_MC_END();
5037 }
5038 else
5039 {
5040 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 IEM_MC_BEGIN(0, 0);
5044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5045 IEM_MC_ADVANCE_RIP();
5046 } IEM_MC_ELSE() {
5047 IEM_MC_REL_JMP_S32(i32Imm);
5048 } IEM_MC_ENDIF();
5049 IEM_MC_END();
5050 }
5051 return VINF_SUCCESS;
5052}
5053
5054
5055/** Opcode 0x0f 0x86. */
5056FNIEMOP_DEF(iemOp_jbe_Jv)
5057{
5058 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5059 IEMOP_HLP_MIN_386();
5060 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5061 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5062 {
5063 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065
5066 IEM_MC_BEGIN(0, 0);
5067 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5068 IEM_MC_REL_JMP_S16(i16Imm);
5069 } IEM_MC_ELSE() {
5070 IEM_MC_ADVANCE_RIP();
5071 } IEM_MC_ENDIF();
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5078
5079 IEM_MC_BEGIN(0, 0);
5080 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5081 IEM_MC_REL_JMP_S32(i32Imm);
5082 } IEM_MC_ELSE() {
5083 IEM_MC_ADVANCE_RIP();
5084 } IEM_MC_ENDIF();
5085 IEM_MC_END();
5086 }
5087 return VINF_SUCCESS;
5088}
5089
5090
5091/** Opcode 0x0f 0x87. */
5092FNIEMOP_DEF(iemOp_jnbe_Jv)
5093{
5094 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5095 IEMOP_HLP_MIN_386();
5096 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5097 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5098 {
5099 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5101
5102 IEM_MC_BEGIN(0, 0);
5103 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5104 IEM_MC_ADVANCE_RIP();
5105 } IEM_MC_ELSE() {
5106 IEM_MC_REL_JMP_S16(i16Imm);
5107 } IEM_MC_ENDIF();
5108 IEM_MC_END();
5109 }
5110 else
5111 {
5112 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5114
5115 IEM_MC_BEGIN(0, 0);
5116 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5117 IEM_MC_ADVANCE_RIP();
5118 } IEM_MC_ELSE() {
5119 IEM_MC_REL_JMP_S32(i32Imm);
5120 } IEM_MC_ENDIF();
5121 IEM_MC_END();
5122 }
5123 return VINF_SUCCESS;
5124}
5125
5126
5127/** Opcode 0x0f 0x88. */
5128FNIEMOP_DEF(iemOp_js_Jv)
5129{
5130 IEMOP_MNEMONIC(js_Jv, "js Jv");
5131 IEMOP_HLP_MIN_386();
5132 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5133 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5134 {
5135 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5137
5138 IEM_MC_BEGIN(0, 0);
5139 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5140 IEM_MC_REL_JMP_S16(i16Imm);
5141 } IEM_MC_ELSE() {
5142 IEM_MC_ADVANCE_RIP();
5143 } IEM_MC_ENDIF();
5144 IEM_MC_END();
5145 }
5146 else
5147 {
5148 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5150
5151 IEM_MC_BEGIN(0, 0);
5152 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5153 IEM_MC_REL_JMP_S32(i32Imm);
5154 } IEM_MC_ELSE() {
5155 IEM_MC_ADVANCE_RIP();
5156 } IEM_MC_ENDIF();
5157 IEM_MC_END();
5158 }
5159 return VINF_SUCCESS;
5160}
5161
5162
5163/** Opcode 0x0f 0x89. */
5164FNIEMOP_DEF(iemOp_jns_Jv)
5165{
5166 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5167 IEMOP_HLP_MIN_386();
5168 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5169 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5170 {
5171 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173
5174 IEM_MC_BEGIN(0, 0);
5175 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5176 IEM_MC_ADVANCE_RIP();
5177 } IEM_MC_ELSE() {
5178 IEM_MC_REL_JMP_S16(i16Imm);
5179 } IEM_MC_ENDIF();
5180 IEM_MC_END();
5181 }
5182 else
5183 {
5184 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186
5187 IEM_MC_BEGIN(0, 0);
5188 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5189 IEM_MC_ADVANCE_RIP();
5190 } IEM_MC_ELSE() {
5191 IEM_MC_REL_JMP_S32(i32Imm);
5192 } IEM_MC_ENDIF();
5193 IEM_MC_END();
5194 }
5195 return VINF_SUCCESS;
5196}
5197
5198
5199/** Opcode 0x0f 0x8a. */
5200FNIEMOP_DEF(iemOp_jp_Jv)
5201{
5202 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5203 IEMOP_HLP_MIN_386();
5204 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5205 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5206 {
5207 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5209
5210 IEM_MC_BEGIN(0, 0);
5211 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5212 IEM_MC_REL_JMP_S16(i16Imm);
5213 } IEM_MC_ELSE() {
5214 IEM_MC_ADVANCE_RIP();
5215 } IEM_MC_ENDIF();
5216 IEM_MC_END();
5217 }
5218 else
5219 {
5220 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222
5223 IEM_MC_BEGIN(0, 0);
5224 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5225 IEM_MC_REL_JMP_S32(i32Imm);
5226 } IEM_MC_ELSE() {
5227 IEM_MC_ADVANCE_RIP();
5228 } IEM_MC_ENDIF();
5229 IEM_MC_END();
5230 }
5231 return VINF_SUCCESS;
5232}
5233
5234
5235/** Opcode 0x0f 0x8b. */
5236FNIEMOP_DEF(iemOp_jnp_Jv)
5237{
5238 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5239 IEMOP_HLP_MIN_386();
5240 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5241 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5242 {
5243 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245
5246 IEM_MC_BEGIN(0, 0);
5247 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5248 IEM_MC_ADVANCE_RIP();
5249 } IEM_MC_ELSE() {
5250 IEM_MC_REL_JMP_S16(i16Imm);
5251 } IEM_MC_ENDIF();
5252 IEM_MC_END();
5253 }
5254 else
5255 {
5256 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5258
5259 IEM_MC_BEGIN(0, 0);
5260 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5261 IEM_MC_ADVANCE_RIP();
5262 } IEM_MC_ELSE() {
5263 IEM_MC_REL_JMP_S32(i32Imm);
5264 } IEM_MC_ENDIF();
5265 IEM_MC_END();
5266 }
5267 return VINF_SUCCESS;
5268}
5269
5270
5271/** Opcode 0x0f 0x8c. */
5272FNIEMOP_DEF(iemOp_jl_Jv)
5273{
5274 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5275 IEMOP_HLP_MIN_386();
5276 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5277 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5278 {
5279 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281
5282 IEM_MC_BEGIN(0, 0);
5283 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5284 IEM_MC_REL_JMP_S16(i16Imm);
5285 } IEM_MC_ELSE() {
5286 IEM_MC_ADVANCE_RIP();
5287 } IEM_MC_ENDIF();
5288 IEM_MC_END();
5289 }
5290 else
5291 {
5292 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5294
5295 IEM_MC_BEGIN(0, 0);
5296 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5297 IEM_MC_REL_JMP_S32(i32Imm);
5298 } IEM_MC_ELSE() {
5299 IEM_MC_ADVANCE_RIP();
5300 } IEM_MC_ENDIF();
5301 IEM_MC_END();
5302 }
5303 return VINF_SUCCESS;
5304}
5305
5306
5307/** Opcode 0x0f 0x8d. */
5308FNIEMOP_DEF(iemOp_jnl_Jv)
5309{
5310 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5311 IEMOP_HLP_MIN_386();
5312 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5313 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5314 {
5315 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5317
5318 IEM_MC_BEGIN(0, 0);
5319 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5320 IEM_MC_ADVANCE_RIP();
5321 } IEM_MC_ELSE() {
5322 IEM_MC_REL_JMP_S16(i16Imm);
5323 } IEM_MC_ENDIF();
5324 IEM_MC_END();
5325 }
5326 else
5327 {
5328 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330
5331 IEM_MC_BEGIN(0, 0);
5332 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5333 IEM_MC_ADVANCE_RIP();
5334 } IEM_MC_ELSE() {
5335 IEM_MC_REL_JMP_S32(i32Imm);
5336 } IEM_MC_ENDIF();
5337 IEM_MC_END();
5338 }
5339 return VINF_SUCCESS;
5340}
5341
5342
5343/** Opcode 0x0f 0x8e. */
5344FNIEMOP_DEF(iemOp_jle_Jv)
5345{
5346 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5347 IEMOP_HLP_MIN_386();
5348 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5349 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5350 {
5351 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5353
5354 IEM_MC_BEGIN(0, 0);
5355 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5356 IEM_MC_REL_JMP_S16(i16Imm);
5357 } IEM_MC_ELSE() {
5358 IEM_MC_ADVANCE_RIP();
5359 } IEM_MC_ENDIF();
5360 IEM_MC_END();
5361 }
5362 else
5363 {
5364 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5366
5367 IEM_MC_BEGIN(0, 0);
5368 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5369 IEM_MC_REL_JMP_S32(i32Imm);
5370 } IEM_MC_ELSE() {
5371 IEM_MC_ADVANCE_RIP();
5372 } IEM_MC_ENDIF();
5373 IEM_MC_END();
5374 }
5375 return VINF_SUCCESS;
5376}
5377
5378
5379/** Opcode 0x0f 0x8f. */
5380FNIEMOP_DEF(iemOp_jnle_Jv)
5381{
5382 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5383 IEMOP_HLP_MIN_386();
5384 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5385 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5386 {
5387 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5389
5390 IEM_MC_BEGIN(0, 0);
5391 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5392 IEM_MC_ADVANCE_RIP();
5393 } IEM_MC_ELSE() {
5394 IEM_MC_REL_JMP_S16(i16Imm);
5395 } IEM_MC_ENDIF();
5396 IEM_MC_END();
5397 }
5398 else
5399 {
5400 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5402
5403 IEM_MC_BEGIN(0, 0);
5404 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5405 IEM_MC_ADVANCE_RIP();
5406 } IEM_MC_ELSE() {
5407 IEM_MC_REL_JMP_S32(i32Imm);
5408 } IEM_MC_ENDIF();
5409 IEM_MC_END();
5410 }
5411 return VINF_SUCCESS;
5412}
5413
5414
5415/** Opcode 0x0f 0x90. */
5416FNIEMOP_DEF(iemOp_seto_Eb)
5417{
5418 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5419 IEMOP_HLP_MIN_386();
5420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5421
5422 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5423 * any way. AMD says it's "unused", whatever that means. We're
5424 * ignoring it for now. */
5425 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5426 {
5427 /* register target */
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_BEGIN(0, 0);
5430 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5431 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5432 } IEM_MC_ELSE() {
5433 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5434 } IEM_MC_ENDIF();
5435 IEM_MC_ADVANCE_RIP();
5436 IEM_MC_END();
5437 }
5438 else
5439 {
5440 /* memory target */
5441 IEM_MC_BEGIN(0, 1);
5442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5446 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5447 } IEM_MC_ELSE() {
5448 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5449 } IEM_MC_ENDIF();
5450 IEM_MC_ADVANCE_RIP();
5451 IEM_MC_END();
5452 }
5453 return VINF_SUCCESS;
5454}
5455
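/*
 * Note on the SETcc forms, illustrative example only: each 0x0f 0x9x handler
 * writes a single byte, 1 when the condition holds and 0 otherwise; e.g.
 * right after 'cmp eax, eax' a seto stores 0 since OF is clear.  The
 * destination is always Eb, so operand-size prefixes do not widen the store.
 */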
5456
5457/** Opcode 0x0f 0x91. */
5458FNIEMOP_DEF(iemOp_setno_Eb)
5459{
5460 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5461 IEMOP_HLP_MIN_386();
5462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5463
5464 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5465 * any way. AMD says it's "unused", whatever that means. We're
5466 * ignoring it for now. */
5467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5468 {
5469 /* register target */
5470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5471 IEM_MC_BEGIN(0, 0);
5472 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5473 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5474 } IEM_MC_ELSE() {
5475 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5476 } IEM_MC_ENDIF();
5477 IEM_MC_ADVANCE_RIP();
5478 IEM_MC_END();
5479 }
5480 else
5481 {
5482 /* memory target */
5483 IEM_MC_BEGIN(0, 1);
5484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5487 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5488 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5489 } IEM_MC_ELSE() {
5490 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5491 } IEM_MC_ENDIF();
5492 IEM_MC_ADVANCE_RIP();
5493 IEM_MC_END();
5494 }
5495 return VINF_SUCCESS;
5496}
5497
5498
5499/** Opcode 0x0f 0x92. */
5500FNIEMOP_DEF(iemOp_setc_Eb)
5501{
5502 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5503 IEMOP_HLP_MIN_386();
5504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5505
5506 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5507 * any way. AMD says it's "unused", whatever that means. We're
5508 * ignoring it for now. */
5509 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5510 {
5511 /* register target */
5512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5513 IEM_MC_BEGIN(0, 0);
5514 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5515 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5516 } IEM_MC_ELSE() {
5517 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5518 } IEM_MC_ENDIF();
5519 IEM_MC_ADVANCE_RIP();
5520 IEM_MC_END();
5521 }
5522 else
5523 {
5524 /* memory target */
5525 IEM_MC_BEGIN(0, 1);
5526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5530 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5531 } IEM_MC_ELSE() {
5532 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5533 } IEM_MC_ENDIF();
5534 IEM_MC_ADVANCE_RIP();
5535 IEM_MC_END();
5536 }
5537 return VINF_SUCCESS;
5538}
5539
5540
5541/** Opcode 0x0f 0x93. */
5542FNIEMOP_DEF(iemOp_setnc_Eb)
5543{
5544 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5545 IEMOP_HLP_MIN_386();
5546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5547
5548 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5549 * any way. AMD says it's "unused", whatever that means. We're
5550 * ignoring it for now. */
5551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5552 {
5553 /* register target */
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_BEGIN(0, 0);
5556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5557 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5558 } IEM_MC_ELSE() {
5559 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5560 } IEM_MC_ENDIF();
5561 IEM_MC_ADVANCE_RIP();
5562 IEM_MC_END();
5563 }
5564 else
5565 {
5566 /* memory target */
5567 IEM_MC_BEGIN(0, 1);
5568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5571 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5572 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5573 } IEM_MC_ELSE() {
5574 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5575 } IEM_MC_ENDIF();
5576 IEM_MC_ADVANCE_RIP();
5577 IEM_MC_END();
5578 }
5579 return VINF_SUCCESS;
5580}
5581
5582
5583/** Opcode 0x0f 0x94. */
5584FNIEMOP_DEF(iemOp_sete_Eb)
5585{
5586 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5587 IEMOP_HLP_MIN_386();
5588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5589
5590 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5591 * any way. AMD says it's "unused", whatever that means. We're
5592 * ignoring it for now. */
5593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5594 {
5595 /* register target */
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597 IEM_MC_BEGIN(0, 0);
5598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5599 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5600 } IEM_MC_ELSE() {
5601 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5602 } IEM_MC_ENDIF();
5603 IEM_MC_ADVANCE_RIP();
5604 IEM_MC_END();
5605 }
5606 else
5607 {
5608 /* memory target */
5609 IEM_MC_BEGIN(0, 1);
5610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5614 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5615 } IEM_MC_ELSE() {
5616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5617 } IEM_MC_ENDIF();
5618 IEM_MC_ADVANCE_RIP();
5619 IEM_MC_END();
5620 }
5621 return VINF_SUCCESS;
5622}
5623
5624
5625/** Opcode 0x0f 0x95. */
5626FNIEMOP_DEF(iemOp_setne_Eb)
5627{
5628 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5629 IEMOP_HLP_MIN_386();
5630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5631
5632 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5633 * any way. AMD says it's "unused", whatever that means. We're
5634 * ignoring it for now. */
5635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5636 {
5637 /* register target */
5638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5639 IEM_MC_BEGIN(0, 0);
5640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5641 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5642 } IEM_MC_ELSE() {
5643 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5644 } IEM_MC_ENDIF();
5645 IEM_MC_ADVANCE_RIP();
5646 IEM_MC_END();
5647 }
5648 else
5649 {
5650 /* memory target */
5651 IEM_MC_BEGIN(0, 1);
5652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5656 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5657 } IEM_MC_ELSE() {
5658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5659 } IEM_MC_ENDIF();
5660 IEM_MC_ADVANCE_RIP();
5661 IEM_MC_END();
5662 }
5663 return VINF_SUCCESS;
5664}
5665
5666
5667/** Opcode 0x0f 0x96. */
5668FNIEMOP_DEF(iemOp_setbe_Eb)
5669{
5670 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5671 IEMOP_HLP_MIN_386();
5672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5673
5674 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5675 * any way. AMD says it's "unused", whatever that means. We're
5676 * ignoring it for now. */
5677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5678 {
5679 /* register target */
5680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5681 IEM_MC_BEGIN(0, 0);
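/* 'be' is unsigned below-or-equal: CF or ZF set. */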
5682 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5683 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5684 } IEM_MC_ELSE() {
5685 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5686 } IEM_MC_ENDIF();
5687 IEM_MC_ADVANCE_RIP();
5688 IEM_MC_END();
5689 }
5690 else
5691 {
5692 /* memory target */
5693 IEM_MC_BEGIN(0, 1);
5694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5697 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5699 } IEM_MC_ELSE() {
5700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5701 } IEM_MC_ENDIF();
5702 IEM_MC_ADVANCE_RIP();
5703 IEM_MC_END();
5704 }
5705 return VINF_SUCCESS;
5706}
5707
5708
5709/** Opcode 0x0f 0x97. */
5710FNIEMOP_DEF(iemOp_setnbe_Eb)
5711{
5712 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5713 IEMOP_HLP_MIN_386();
5714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5715
5716 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5717 * any way. AMD says it's "unused", whatever that means. We're
5718 * ignoring it for now. */
5719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5720 {
5721 /* register target */
5722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5723 IEM_MC_BEGIN(0, 0);
5724 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5725 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5726 } IEM_MC_ELSE() {
5727 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5728 } IEM_MC_ENDIF();
5729 IEM_MC_ADVANCE_RIP();
5730 IEM_MC_END();
5731 }
5732 else
5733 {
5734 /* memory target */
5735 IEM_MC_BEGIN(0, 1);
5736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5739 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5741 } IEM_MC_ELSE() {
5742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5743 } IEM_MC_ENDIF();
5744 IEM_MC_ADVANCE_RIP();
5745 IEM_MC_END();
5746 }
5747 return VINF_SUCCESS;
5748}
5749
5750
5751/** Opcode 0x0f 0x98. */
5752FNIEMOP_DEF(iemOp_sets_Eb)
5753{
5754 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5755 IEMOP_HLP_MIN_386();
5756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5757
5758 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5759 * any way. AMD says it's "unused", whatever that means. We're
5760 * ignoring it for now. */
5761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5762 {
5763 /* register target */
5764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5765 IEM_MC_BEGIN(0, 0);
5766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5767 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5768 } IEM_MC_ELSE() {
5769 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5770 } IEM_MC_ENDIF();
5771 IEM_MC_ADVANCE_RIP();
5772 IEM_MC_END();
5773 }
5774 else
5775 {
5776 /* memory target */
5777 IEM_MC_BEGIN(0, 1);
5778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5783 } IEM_MC_ELSE() {
5784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5785 } IEM_MC_ENDIF();
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 }
5789 return VINF_SUCCESS;
5790}
5791
5792
5793/** Opcode 0x0f 0x99. */
5794FNIEMOP_DEF(iemOp_setns_Eb)
5795{
5796 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5797 IEMOP_HLP_MIN_386();
5798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5799
5800 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5801 * any way. AMD says it's "unused", whatever that means. We're
5802 * ignoring it for now. */
5803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5804 {
5805 /* register target */
5806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5807 IEM_MC_BEGIN(0, 0);
5808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5809 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5810 } IEM_MC_ELSE() {
5811 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5812 } IEM_MC_ENDIF();
5813 IEM_MC_ADVANCE_RIP();
5814 IEM_MC_END();
5815 }
5816 else
5817 {
5818 /* memory target */
5819 IEM_MC_BEGIN(0, 1);
5820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5824 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5825 } IEM_MC_ELSE() {
5826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5827 } IEM_MC_ENDIF();
5828 IEM_MC_ADVANCE_RIP();
5829 IEM_MC_END();
5830 }
5831 return VINF_SUCCESS;
5832}
5833
5834
5835/** Opcode 0x0f 0x9a. */
5836FNIEMOP_DEF(iemOp_setp_Eb)
5837{
5838 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5839 IEMOP_HLP_MIN_386();
5840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5841
5842 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5843 * any way. AMD says it's "unused", whatever that means. We're
5844 * ignoring it for now. */
5845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5846 {
5847 /* register target */
5848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5849 IEM_MC_BEGIN(0, 0);
5850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5851 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5852 } IEM_MC_ELSE() {
5853 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5854 } IEM_MC_ENDIF();
5855 IEM_MC_ADVANCE_RIP();
5856 IEM_MC_END();
5857 }
5858 else
5859 {
5860 /* memory target */
5861 IEM_MC_BEGIN(0, 1);
5862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5866 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5867 } IEM_MC_ELSE() {
5868 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5869 } IEM_MC_ENDIF();
5870 IEM_MC_ADVANCE_RIP();
5871 IEM_MC_END();
5872 }
5873 return VINF_SUCCESS;
5874}
5875
5876
5877/** Opcode 0x0f 0x9b. */
5878FNIEMOP_DEF(iemOp_setnp_Eb)
5879{
5880 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5881 IEMOP_HLP_MIN_386();
5882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5883
5884 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5885 * any way. AMD says it's "unused", whatever that means. We're
5886 * ignoring it for now. */
5887 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5888 {
5889 /* register target */
5890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5891 IEM_MC_BEGIN(0, 0);
5892 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5893 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5894 } IEM_MC_ELSE() {
5895 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5896 } IEM_MC_ENDIF();
5897 IEM_MC_ADVANCE_RIP();
5898 IEM_MC_END();
5899 }
5900 else
5901 {
5902 /* memory target */
5903 IEM_MC_BEGIN(0, 1);
5904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5907 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5908 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5909 } IEM_MC_ELSE() {
5910 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5911 } IEM_MC_ENDIF();
5912 IEM_MC_ADVANCE_RIP();
5913 IEM_MC_END();
5914 }
5915 return VINF_SUCCESS;
5916}
5917
5918
5919/** Opcode 0x0f 0x9c. */
5920FNIEMOP_DEF(iemOp_setl_Eb)
5921{
5922 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5923 IEMOP_HLP_MIN_386();
5924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5925
5926 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5927 * any way. AMD says it's "unused", whatever that means. We're
5928 * ignoring it for now. */
5929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5930 {
5931 /* register target */
5932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5933 IEM_MC_BEGIN(0, 0);
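/* 'l' is signed less-than: SF != OF. */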
5934 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5935 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5936 } IEM_MC_ELSE() {
5937 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5938 } IEM_MC_ENDIF();
5939 IEM_MC_ADVANCE_RIP();
5940 IEM_MC_END();
5941 }
5942 else
5943 {
5944 /* memory target */
5945 IEM_MC_BEGIN(0, 1);
5946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5950 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5951 } IEM_MC_ELSE() {
5952 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5953 } IEM_MC_ENDIF();
5954 IEM_MC_ADVANCE_RIP();
5955 IEM_MC_END();
5956 }
5957 return VINF_SUCCESS;
5958}
5959
5960
5961/** Opcode 0x0f 0x9d. */
5962FNIEMOP_DEF(iemOp_setnl_Eb)
5963{
5964 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5965 IEMOP_HLP_MIN_386();
5966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5967
5968 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5969 * any way. AMD says it's "unused", whatever that means. We're
5970 * ignoring it for now. */
5971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5972 {
5973 /* register target */
5974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5975 IEM_MC_BEGIN(0, 0);
5976 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5977 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5978 } IEM_MC_ELSE() {
5979 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5980 } IEM_MC_ENDIF();
5981 IEM_MC_ADVANCE_RIP();
5982 IEM_MC_END();
5983 }
5984 else
5985 {
5986 /* memory target */
5987 IEM_MC_BEGIN(0, 1);
5988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5992 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5993 } IEM_MC_ELSE() {
5994 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5995 } IEM_MC_ENDIF();
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 }
5999 return VINF_SUCCESS;
6000}
6001
6002
6003/** Opcode 0x0f 0x9e. */
6004FNIEMOP_DEF(iemOp_setle_Eb)
6005{
6006 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6007 IEMOP_HLP_MIN_386();
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6009
6010 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6011 * any way. AMD says it's "unused", whatever that means. We're
6012 * ignoring it for now. */
6013 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6014 {
6015 /* register target */
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6017 IEM_MC_BEGIN(0, 0);
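/* 'le' is signed less-or-equal: ZF set, or SF != OF. */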
6018 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6019 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6020 } IEM_MC_ELSE() {
6021 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6022 } IEM_MC_ENDIF();
6023 IEM_MC_ADVANCE_RIP();
6024 IEM_MC_END();
6025 }
6026 else
6027 {
6028 /* memory target */
6029 IEM_MC_BEGIN(0, 1);
6030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6034 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6035 } IEM_MC_ELSE() {
6036 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6037 } IEM_MC_ENDIF();
6038 IEM_MC_ADVANCE_RIP();
6039 IEM_MC_END();
6040 }
6041 return VINF_SUCCESS;
6042}
6043
6044
6045/** Opcode 0x0f 0x9f. */
6046FNIEMOP_DEF(iemOp_setnle_Eb)
6047{
6048 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6049 IEMOP_HLP_MIN_386();
6050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6051
6052 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6053 * any way. AMD says it's "unused", whatever that means. We're
6054 * ignoring it for now. */
6055 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6056 {
6057 /* register target */
6058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6059 IEM_MC_BEGIN(0, 0);
6060 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6061 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6062 } IEM_MC_ELSE() {
6063 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6064 } IEM_MC_ENDIF();
6065 IEM_MC_ADVANCE_RIP();
6066 IEM_MC_END();
6067 }
6068 else
6069 {
6070 /* memory target */
6071 IEM_MC_BEGIN(0, 1);
6072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6075 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6076 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6077 } IEM_MC_ELSE() {
6078 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6079 } IEM_MC_ENDIF();
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 }
6083 return VINF_SUCCESS;
6084}
6085
6086
6087/**
6088 * Common 'push segment-register' helper.
6089 */
6090FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6091{
6092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6093 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS pushes are encodable in 64-bit mode. */
6094 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6095
6096 switch (pVCpu->iem.s.enmEffOpSize)
6097 {
6098 case IEMMODE_16BIT:
6099 IEM_MC_BEGIN(0, 1);
6100 IEM_MC_LOCAL(uint16_t, u16Value);
6101 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6102 IEM_MC_PUSH_U16(u16Value);
6103 IEM_MC_ADVANCE_RIP();
6104 IEM_MC_END();
6105 break;
6106
6107 case IEMMODE_32BIT:
6108 IEM_MC_BEGIN(0, 1);
6109 IEM_MC_LOCAL(uint32_t, u32Value);
6110 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
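/* A dedicated SREG push MC is used here: real CPUs generally perform a
   16-bit write for a 32-bit segment register push, leaving the high word
   of the stack slot untouched, which is presumably what this reproduces. */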
6111 IEM_MC_PUSH_U32_SREG(u32Value);
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 break;
6115
6116 case IEMMODE_64BIT:
6117 IEM_MC_BEGIN(0, 1);
6118 IEM_MC_LOCAL(uint64_t, u64Value);
6119 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6120 IEM_MC_PUSH_U64(u64Value);
6121 IEM_MC_ADVANCE_RIP();
6122 IEM_MC_END();
6123 break;
6124 }
6125
6126 return VINF_SUCCESS;
6127}
6128
6129
6130/** Opcode 0x0f 0xa0. */
6131FNIEMOP_DEF(iemOp_push_fs)
6132{
6133 IEMOP_MNEMONIC(push_fs, "push fs");
6134 IEMOP_HLP_MIN_386();
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6136 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6137}
6138
6139
6140/** Opcode 0x0f 0xa1. */
6141FNIEMOP_DEF(iemOp_pop_fs)
6142{
6143 IEMOP_MNEMONIC(pop_fs, "pop fs");
6144 IEMOP_HLP_MIN_386();
6145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6146 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6147}
6148
6149
6150/** Opcode 0x0f 0xa2. */
6151FNIEMOP_DEF(iemOp_cpuid)
6152{
6153 IEMOP_MNEMONIC(cpuid, "cpuid");
6154 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
6155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6156 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6157}
6158
6159
6160/**
6161 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6162 * iemOp_bts_Ev_Gv.
6163 */
6164FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6165{
6166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6168
6169 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6170 {
6171 /* register destination. */
6172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6173 switch (pVCpu->iem.s.enmEffOpSize)
6174 {
6175 case IEMMODE_16BIT:
6176 IEM_MC_BEGIN(3, 0);
6177 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6178 IEM_MC_ARG(uint16_t, u16Src, 1);
6179 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6180
6181 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6182 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6183 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6184 IEM_MC_REF_EFLAGS(pEFlags);
6185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6186
6187 IEM_MC_ADVANCE_RIP();
6188 IEM_MC_END();
6189 return VINF_SUCCESS;
6190
6191 case IEMMODE_32BIT:
6192 IEM_MC_BEGIN(3, 0);
6193 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6194 IEM_MC_ARG(uint32_t, u32Src, 1);
6195 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6196
6197 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6198 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6199 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6200 IEM_MC_REF_EFLAGS(pEFlags);
6201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6202
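/* A 32-bit operand size zeroes the upper half of the destination
   register in 64-bit mode. */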
6203 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6204 IEM_MC_ADVANCE_RIP();
6205 IEM_MC_END();
6206 return VINF_SUCCESS;
6207
6208 case IEMMODE_64BIT:
6209 IEM_MC_BEGIN(3, 0);
6210 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6211 IEM_MC_ARG(uint64_t, u64Src, 1);
6212 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6213
6214 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6215 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6216 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6217 IEM_MC_REF_EFLAGS(pEFlags);
6218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6219
6220 IEM_MC_ADVANCE_RIP();
6221 IEM_MC_END();
6222 return VINF_SUCCESS;
6223
6224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6225 }
6226 }
6227 else
6228 {
6229 /* memory destination. */
6230
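/* BT only reads the destination, while BTS/BTR/BTC modify it; only the
   modifying forms have locked variants, hence the pfnLockedU16 check. */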
6231 uint32_t fAccess;
6232 if (pImpl->pfnLockedU16)
6233 fAccess = IEM_ACCESS_DATA_RW;
6234 else /* BT */
6235 fAccess = IEM_ACCESS_DATA_R;
6236
6237 /** @todo test negative bit offsets! */
6238 switch (pVCpu->iem.s.enmEffOpSize)
6239 {
6240 case IEMMODE_16BIT:
6241 IEM_MC_BEGIN(3, 2);
6242 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6243 IEM_MC_ARG(uint16_t, u16Src, 1);
6244 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6246 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6247
6248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6249 if (pImpl->pfnLockedU16)
6250 IEMOP_HLP_DONE_DECODING();
6251 else
6252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6253 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6254 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
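/* The bit offset is signed: bit N of a memory operand lives in the word
   at GCPtrEffDst + (N >> 4) * 2, at position (N & 15) within it, hence
   the mask and the sar/shl pair below. The 32-bit and 64-bit cases
   scale analogously (>> 5 with * 4, and >> 6 with * 8). */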
6255 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6256 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6257 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6258 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6259 IEM_MC_FETCH_EFLAGS(EFlags);
6260
6261 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6264 else
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6267
6268 IEM_MC_COMMIT_EFLAGS(EFlags);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_32BIT:
6274 IEM_MC_BEGIN(3, 2);
6275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6276 IEM_MC_ARG(uint32_t, u32Src, 1);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6280
6281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6282 if (pImpl->pfnLockedU16)
6283 IEMOP_HLP_DONE_DECODING();
6284 else
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6287 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6288 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6289 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6290 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6291 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6292 IEM_MC_FETCH_EFLAGS(EFlags);
6293
6294 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6295 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6297 else
6298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6299 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6300
6301 IEM_MC_COMMIT_EFLAGS(EFlags);
6302 IEM_MC_ADVANCE_RIP();
6303 IEM_MC_END();
6304 return VINF_SUCCESS;
6305
6306 case IEMMODE_64BIT:
6307 IEM_MC_BEGIN(3, 2);
6308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6309 IEM_MC_ARG(uint64_t, u64Src, 1);
6310 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6312 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6313
6314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6315 if (pImpl->pfnLockedU16)
6316 IEMOP_HLP_DONE_DECODING();
6317 else
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6320 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6321 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6322 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6323 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6324 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6325 IEM_MC_FETCH_EFLAGS(EFlags);
6326
6327 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6328 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6330 else
6331 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6332 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6333
6334 IEM_MC_COMMIT_EFLAGS(EFlags);
6335 IEM_MC_ADVANCE_RIP();
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338
6339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6340 }
6341 }
6342}
6343
6344
6345/** Opcode 0x0f 0xa3. */
6346FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6347{
6348 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6349 IEMOP_HLP_MIN_386();
6350 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6351}
6352
6353
6354/**
6355 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6356 */
6357FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6358{
6359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6360 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6361
6362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6363 {
6364 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6366
6367 switch (pVCpu->iem.s.enmEffOpSize)
6368 {
6369 case IEMMODE_16BIT:
6370 IEM_MC_BEGIN(4, 0);
6371 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6372 IEM_MC_ARG(uint16_t, u16Src, 1);
6373 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6374 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6375
6376 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6377 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6378 IEM_MC_REF_EFLAGS(pEFlags);
6379 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6380
6381 IEM_MC_ADVANCE_RIP();
6382 IEM_MC_END();
6383 return VINF_SUCCESS;
6384
6385 case IEMMODE_32BIT:
6386 IEM_MC_BEGIN(4, 0);
6387 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6388 IEM_MC_ARG(uint32_t, u32Src, 1);
6389 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6390 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6391
6392 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6393 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6394 IEM_MC_REF_EFLAGS(pEFlags);
6395 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6396
6397 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 return VINF_SUCCESS;
6401
6402 case IEMMODE_64BIT:
6403 IEM_MC_BEGIN(4, 0);
6404 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6405 IEM_MC_ARG(uint64_t, u64Src, 1);
6406 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6407 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6408
6409 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6410 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6411 IEM_MC_REF_EFLAGS(pEFlags);
6412 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6413
6414 IEM_MC_ADVANCE_RIP();
6415 IEM_MC_END();
6416 return VINF_SUCCESS;
6417
6418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6419 }
6420 }
6421 else
6422 {
6423 switch (pVCpu->iem.s.enmEffOpSize)
6424 {
6425 case IEMMODE_16BIT:
6426 IEM_MC_BEGIN(4, 2);
6427 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6428 IEM_MC_ARG(uint16_t, u16Src, 1);
6429 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6430 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6432
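/* The trailing 1 informs the effective address calculation that one
   immediate byte still follows the ModRM bytes, which matters for
   RIP-relative addressing in 64-bit mode. */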
6433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6434 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6435 IEM_MC_ASSIGN(cShiftArg, cShift);
6436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6437 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6438 IEM_MC_FETCH_EFLAGS(EFlags);
6439 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6440 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6441
6442 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6443 IEM_MC_COMMIT_EFLAGS(EFlags);
6444 IEM_MC_ADVANCE_RIP();
6445 IEM_MC_END();
6446 return VINF_SUCCESS;
6447
6448 case IEMMODE_32BIT:
6449 IEM_MC_BEGIN(4, 2);
6450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6451 IEM_MC_ARG(uint32_t, u32Src, 1);
6452 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6455
6456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6457 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6458 IEM_MC_ASSIGN(cShiftArg, cShift);
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6460 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6461 IEM_MC_FETCH_EFLAGS(EFlags);
6462 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6463 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6464
6465 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6466 IEM_MC_COMMIT_EFLAGS(EFlags);
6467 IEM_MC_ADVANCE_RIP();
6468 IEM_MC_END();
6469 return VINF_SUCCESS;
6470
6471 case IEMMODE_64BIT:
6472 IEM_MC_BEGIN(4, 2);
6473 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6474 IEM_MC_ARG(uint64_t, u64Src, 1);
6475 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6478
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6480 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6481 IEM_MC_ASSIGN(cShiftArg, cShift);
6482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6483 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6484 IEM_MC_FETCH_EFLAGS(EFlags);
6485 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6486 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6487
6488 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6489 IEM_MC_COMMIT_EFLAGS(EFlags);
6490 IEM_MC_ADVANCE_RIP();
6491 IEM_MC_END();
6492 return VINF_SUCCESS;
6493
6494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6495 }
6496 }
6497}
6498
6499
6500/**
6501 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6502 */
6503FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6504{
6505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6506 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6507
6508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6509 {
6510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6511
6512 switch (pVCpu->iem.s.enmEffOpSize)
6513 {
6514 case IEMMODE_16BIT:
6515 IEM_MC_BEGIN(4, 0);
6516 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6517 IEM_MC_ARG(uint16_t, u16Src, 1);
6518 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6519 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6520
6521 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6522 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6523 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6524 IEM_MC_REF_EFLAGS(pEFlags);
6525 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6526
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 return VINF_SUCCESS;
6530
6531 case IEMMODE_32BIT:
6532 IEM_MC_BEGIN(4, 0);
6533 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6534 IEM_MC_ARG(uint32_t, u32Src, 1);
6535 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6536 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6537
6538 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6539 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6540 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6541 IEM_MC_REF_EFLAGS(pEFlags);
6542 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6543
6544 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6545 IEM_MC_ADVANCE_RIP();
6546 IEM_MC_END();
6547 return VINF_SUCCESS;
6548
6549 case IEMMODE_64BIT:
6550 IEM_MC_BEGIN(4, 0);
6551 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6552 IEM_MC_ARG(uint64_t, u64Src, 1);
6553 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6554 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6555
6556 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6557 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6558 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6559 IEM_MC_REF_EFLAGS(pEFlags);
6560 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6561
6562 IEM_MC_ADVANCE_RIP();
6563 IEM_MC_END();
6564 return VINF_SUCCESS;
6565
6566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6567 }
6568 }
6569 else
6570 {
6571 switch (pVCpu->iem.s.enmEffOpSize)
6572 {
6573 case IEMMODE_16BIT:
6574 IEM_MC_BEGIN(4, 2);
6575 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6576 IEM_MC_ARG(uint16_t, u16Src, 1);
6577 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6578 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6580
6581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6583 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6584 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6585 IEM_MC_FETCH_EFLAGS(EFlags);
6586 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6587 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6588
6589 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6590 IEM_MC_COMMIT_EFLAGS(EFlags);
6591 IEM_MC_ADVANCE_RIP();
6592 IEM_MC_END();
6593 return VINF_SUCCESS;
6594
6595 case IEMMODE_32BIT:
6596 IEM_MC_BEGIN(4, 2);
6597 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6598 IEM_MC_ARG(uint32_t, u32Src, 1);
6599 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6602
6603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6605 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6606 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6607 IEM_MC_FETCH_EFLAGS(EFlags);
6608 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6609 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6610
6611 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6612 IEM_MC_COMMIT_EFLAGS(EFlags);
6613 IEM_MC_ADVANCE_RIP();
6614 IEM_MC_END();
6615 return VINF_SUCCESS;
6616
6617 case IEMMODE_64BIT:
6618 IEM_MC_BEGIN(4, 2);
6619 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6620 IEM_MC_ARG(uint64_t, u64Src, 1);
6621 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6622 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6624
6625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6627 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6628 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6629 IEM_MC_FETCH_EFLAGS(EFlags);
6630 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6631 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6632
6633 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6634 IEM_MC_COMMIT_EFLAGS(EFlags);
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 return VINF_SUCCESS;
6638
6639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6640 }
6641 }
6642}
6643
6644
6646/** Opcode 0x0f 0xa4. */
6647FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6648{
6649 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6650 IEMOP_HLP_MIN_386();
6651 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6652}
6653
6654
6655/** Opcode 0x0f 0xa5. */
6656FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6657{
6658 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6659 IEMOP_HLP_MIN_386();
6660 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6661}
6662
6663
6664/** Opcode 0x0f 0xa8. */
6665FNIEMOP_DEF(iemOp_push_gs)
6666{
6667 IEMOP_MNEMONIC(push_gs, "push gs");
6668 IEMOP_HLP_MIN_386();
6669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6670 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6671}
6672
6673
6674/** Opcode 0x0f 0xa9. */
6675FNIEMOP_DEF(iemOp_pop_gs)
6676{
6677 IEMOP_MNEMONIC(pop_gs, "pop gs");
6678 IEMOP_HLP_MIN_386();
6679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6680 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6681}
6682
6683
6684/** Opcode 0x0f 0xaa. */
6685FNIEMOP_DEF(iemOp_rsm)
6686{
6687 IEMOP_MNEMONIC(rsm, "rsm");
6688 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6689 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6690 * intercept). */
6691 IEMOP_BITCH_ABOUT_STUB();
6692 return IEMOP_RAISE_INVALID_OPCODE();
6693}
6694
6697
6698/** Opcode 0x0f 0xab. */
6699FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6700{
6701 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6702 IEMOP_HLP_MIN_386();
6703 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6704}
6705
6706
6707/** Opcode 0x0f 0xac. */
6708FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6709{
6710 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6711 IEMOP_HLP_MIN_386();
6712 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6713}
6714
6715
6716/** Opcode 0x0f 0xad. */
6717FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6718{
6719 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6720 IEMOP_HLP_MIN_386();
6721 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6722}
6723
6724
6725/** Opcode 0x0f 0xae mem/0. */
6726FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6727{
6728 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6729 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6730 return IEMOP_RAISE_INVALID_OPCODE();
6731
6732 IEM_MC_BEGIN(3, 1);
6733 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6734 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6735 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6739 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6740 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6741 IEM_MC_END();
6742 return VINF_SUCCESS;
6743}
6744
6745
6746/** Opcode 0x0f 0xae mem/1. */
6747FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6748{
6749 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6750 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6751 return IEMOP_RAISE_INVALID_OPCODE();
6752
6753 IEM_MC_BEGIN(3, 1);
6754 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6755 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6756 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6760 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6761 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6762 IEM_MC_END();
6763 return VINF_SUCCESS;
6764}
6765
6766
6767/**
6768 * @opmaps grp15
6769 * @opcode !11/2
6770 * @oppfx none
6771 * @opcpuid sse
6772 * @opgroup og_sse_mxcsrsm
6773 * @opxcpttype 5
6774 * @optest op1=0 -> mxcsr=0
6775 * @optest op1=0x2083 -> mxcsr=0x2083
6776 * @optest op1=0xfffffffe -> value.xcpt=0xd
6777 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6778 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6779 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6780 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6781 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6782 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6783 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6784 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6785 */
6786FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6787{
6788 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6789 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6790 return IEMOP_RAISE_INVALID_OPCODE();
6791
6792 IEM_MC_BEGIN(2, 0);
6793 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6794 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR, so mark the SSE state as changed. */
6798 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6799 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6800 IEM_MC_END();
6801 return VINF_SUCCESS;
6802}
6803
6804
6805/**
6806 * @opmaps grp15
6807 * @opcode !11/3
6808 * @oppfx none
6809 * @opcpuid sse
6810 * @opgroup og_sse_mxcsrsm
6811 * @opxcpttype 5
6812 * @optest mxcsr=0 -> op1=0
6813 * @optest mxcsr=0x2083 -> op1=0x2083
6814 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6815 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6816 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6817 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6818 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6819 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6820 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6821 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6822 */
6823FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6824{
6825 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6826 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6827 return IEMOP_RAISE_INVALID_OPCODE();
6828
6829 IEM_MC_BEGIN(2, 0);
6830 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6831 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6835 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6836 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6837 IEM_MC_END();
6838 return VINF_SUCCESS;
6839}
6840
6841
6842/**
6843 * @opmaps grp15
6844 * @opcode !11/4
6845 * @oppfx none
6846 * @opcpuid xsave
6847 * @opgroup og_system
6848 * @opxcpttype none
6849 */
6850FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6851{
6852 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6853 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6854 return IEMOP_RAISE_INVALID_OPCODE();
6855
6856 IEM_MC_BEGIN(3, 0);
6857 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6858 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6859 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6862 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6863 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6864 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6865 IEM_MC_END();
6866 return VINF_SUCCESS;
6867}
6868
6869
6870/**
6871 * @opmaps grp15
6872 * @opcode !11/5
6873 * @oppfx none
6874 * @opcpuid xsave
6875 * @opgroup og_system
6876 * @opxcpttype none
6877 */
6878FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6879{
6880 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6881 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6882 return IEMOP_RAISE_INVALID_OPCODE();
6883
6884 IEM_MC_BEGIN(3, 0);
6885 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6886 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6887 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6890 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads new state, like fxrstor above. */
6891 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6892 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6893 IEM_MC_END();
6894 return VINF_SUCCESS;
6895}
6896
6897/** Opcode 0x0f 0xae mem/6. */
6898FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6899
6900/**
6901 * @opmaps grp15
6902 * @opcode !11/7
6903 * @oppfx none
6904 * @opcpuid clfsh
6905 * @opgroup og_cachectl
6906 * @optest op1=1 ->
6907 */
6908FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6909{
6910 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6911 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6912 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6913
6914 IEM_MC_BEGIN(2, 0);
6915 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6916 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6919 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6920 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6921 IEM_MC_END();
6922 return VINF_SUCCESS;
6923}
6924
6925/**
6926 * @opmaps grp15
6927 * @opcode !11/7
6928 * @oppfx 0x66
6929 * @opcpuid clflushopt
6930 * @opgroup og_cachectl
6931 * @optest op1=1 ->
6932 */
6933FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6934{
6935 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6936 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6937 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6938
6939 IEM_MC_BEGIN(2, 0);
6940 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6941 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6944 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6945 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6946 IEM_MC_END();
6947 return VINF_SUCCESS;
6948}
6949
6950
6951/** Opcode 0x0f 0xae 11b/5. */
6952FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6953{
6954 RT_NOREF_PV(bRm);
6955 IEMOP_MNEMONIC(lfence, "lfence");
6956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6957 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6958 return IEMOP_RAISE_INVALID_OPCODE();
6959
6960 IEM_MC_BEGIN(0, 0);
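/* If the host CPU cannot execute a real lfence, fall back to the generic
   alternative fence, presumably implemented with a serializing locked op. */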
6961 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6962 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6963 else
6964 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6965 IEM_MC_ADVANCE_RIP();
6966 IEM_MC_END();
6967 return VINF_SUCCESS;
6968}
6969
6970
6971/** Opcode 0x0f 0xae 11b/6. */
6972FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6973{
6974 RT_NOREF_PV(bRm);
6975 IEMOP_MNEMONIC(mfence, "mfence");
6976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6977 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6978 return IEMOP_RAISE_INVALID_OPCODE();
6979
6980 IEM_MC_BEGIN(0, 0);
6981 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6982 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6983 else
6984 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6985 IEM_MC_ADVANCE_RIP();
6986 IEM_MC_END();
6987 return VINF_SUCCESS;
6988}
6989
6990
6991/** Opcode 0x0f 0xae 11b/7. */
6992FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6993{
6994 RT_NOREF_PV(bRm);
6995 IEMOP_MNEMONIC(sfence, "sfence");
6996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6997 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6998 return IEMOP_RAISE_INVALID_OPCODE();
6999
7000 IEM_MC_BEGIN(0, 0);
7001 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7002 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7003 else
7004 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7005 IEM_MC_ADVANCE_RIP();
7006 IEM_MC_END();
7007 return VINF_SUCCESS;
7008}
7009
7010
7011/** Opcode 0xf3 0x0f 0xae 11b/0. */
7012FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7013{
7014 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7016 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7017 {
7018 IEM_MC_BEGIN(1, 0);
7019 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7020 IEM_MC_ARG(uint64_t, u64Dst, 0);
7021 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7022 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7023 IEM_MC_ADVANCE_RIP();
7024 IEM_MC_END();
7025 }
7026 else
7027 {
7028 IEM_MC_BEGIN(1, 0);
7029 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7030 IEM_MC_ARG(uint32_t, u32Dst, 0);
7031 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7032 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 }
7036 return VINF_SUCCESS;
7037}
7038
7039/** Opcode 0xf3 0x0f 0xae 11b/1. */
7040FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7041{
7042 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7044 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7045 {
7046 IEM_MC_BEGIN(1, 0);
7047 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7048 IEM_MC_ARG(uint64_t, u64Dst, 0);
7049 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7050 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7051 IEM_MC_ADVANCE_RIP();
7052 IEM_MC_END();
7053 }
7054 else
7055 {
7056 IEM_MC_BEGIN(1, 0);
7057 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7058 IEM_MC_ARG(uint32_t, u32Dst, 0);
7059 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7060 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7061 IEM_MC_ADVANCE_RIP();
7062 IEM_MC_END();
7063 }
7064 return VINF_SUCCESS;
7065}
7066
7067/** Opcode 0xf3 0x0f 0xae 11b/2. */
7068FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7069{
7070 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7073 {
7074 IEM_MC_BEGIN(1, 0);
7075 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7076 IEM_MC_ARG(uint64_t, u64Dst, 0);
7077 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
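/* WRFSBASE/WRGSBASE raise #GP(0) if the new base address is not canonical. */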
7078 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7079 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 }
7083 else
7084 {
7085 IEM_MC_BEGIN(1, 0);
7086 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7087 IEM_MC_ARG(uint32_t, u32Dst, 0);
7088 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7089 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7090 IEM_MC_ADVANCE_RIP();
7091 IEM_MC_END();
7092 }
7093 return VINF_SUCCESS;
7094}
7095
7096/** Opcode 0xf3 0x0f 0xae 11b/3. */
7097FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7098{
7099 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7101 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7102 {
7103 IEM_MC_BEGIN(1, 0);
7104 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7105 IEM_MC_ARG(uint64_t, u64Dst, 0);
7106 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7107 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7108 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7109 IEM_MC_ADVANCE_RIP();
7110 IEM_MC_END();
7111 }
7112 else
7113 {
7114 IEM_MC_BEGIN(1, 0);
7115 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7116 IEM_MC_ARG(uint32_t, u32Dst, 0);
7117 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7118 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7119 IEM_MC_ADVANCE_RIP();
7120 IEM_MC_END();
7121 }
7122 return VINF_SUCCESS;
7123}
7124
7125
7126/**
7127 * Group 15 jump table for register variant.
7128 */
7129IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7130{ /* pfx: none, 066h, 0f3h, 0f2h */
7131 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7132 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7133 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7134 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7135 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7136 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7137 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7138 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7139};
7140AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7141
7142
7143/**
7144 * Group 15 jump table for memory variant.
7145 */
7146IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7147{ /* pfx: none, 066h, 0f3h, 0f2h */
7148 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7149 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7150 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7151 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7152 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7153 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7154 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7155 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7156};
7157AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7158
7159
7160/** Opcode 0x0f 0xae. */
7161FNIEMOP_DEF(iemOp_Grp15)
7162{
7163 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
7164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
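/* Both tables are indexed by reg * 4 + prefix column, matching the
   pfx: none/066h/0f3h/0f2h layout of the entries above. */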
7165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7166 /* register, register */
7167 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7168 + pVCpu->iem.s.idxPrefix], bRm);
7169 /* memory, register */
7170 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7171 + pVCpu->iem.s.idxPrefix], bRm);
7172}
7173
7174
7175/** Opcode 0x0f 0xaf. */
7176FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7177{
7178 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7179 IEMOP_HLP_MIN_386();
7180 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7181 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7182}
7183
7184
7185/** Opcode 0x0f 0xb0. */
7186FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7187{
7188 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7189 IEMOP_HLP_MIN_486();
7190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7191
7192 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7193 {
7194 IEMOP_HLP_DONE_DECODING();
7195 IEM_MC_BEGIN(4, 0);
7196 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7197 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7198 IEM_MC_ARG(uint8_t, u8Src, 2);
7199 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7200
7201 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7202 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7203 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7204 IEM_MC_REF_EFLAGS(pEFlags);
7205 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7206 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7207 else
7208 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7209
7210 IEM_MC_ADVANCE_RIP();
7211 IEM_MC_END();
7212 }
7213 else
7214 {
7215 IEM_MC_BEGIN(4, 3);
7216 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7217 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7218 IEM_MC_ARG(uint8_t, u8Src, 2);
7219 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7221 IEM_MC_LOCAL(uint8_t, u8Al);
7222
7223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7224 IEMOP_HLP_DONE_DECODING();
7225 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7226 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7227 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7228 IEM_MC_FETCH_EFLAGS(EFlags);
7229 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7230 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7232 else
7233 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7234
7235 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7236 IEM_MC_COMMIT_EFLAGS(EFlags);
7237 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7238 IEM_MC_ADVANCE_RIP();
7239 IEM_MC_END();
7240 }
7241 return VINF_SUCCESS;
7242}
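
/*
 * Illustrative sketch of the architectural semantics behind the
 * iemAImpl_cmpxchg_u8 workers called above (reference only, not built):
 * CMPXCHG compares AL with the destination; on a match ZF is set and the
 * source is written to the destination, otherwise ZF is cleared and the
 * destination value is loaded into AL.  Hypothetical name; the real
 * workers also update the other arithmetic flags from the compare.
 */
#if 0
static void iemSketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *pfEFlags |= X86_EFL_ZF;
        *puDst = uSrc;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        *puAl = *puDst;
    }
}
#endif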
7243
7244/** Opcode 0x0f 0xb1. */
7245FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7246{
7247 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7248 IEMOP_HLP_MIN_486();
7249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7250
7251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7252 {
7253 IEMOP_HLP_DONE_DECODING();
7254 switch (pVCpu->iem.s.enmEffOpSize)
7255 {
7256 case IEMMODE_16BIT:
7257 IEM_MC_BEGIN(4, 0);
7258 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7259 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7260 IEM_MC_ARG(uint16_t, u16Src, 2);
7261 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7262
7263 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7264 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7265 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7266 IEM_MC_REF_EFLAGS(pEFlags);
7267 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7269 else
7270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7271
7272 IEM_MC_ADVANCE_RIP();
7273 IEM_MC_END();
7274 return VINF_SUCCESS;
7275
7276 case IEMMODE_32BIT:
7277 IEM_MC_BEGIN(4, 0);
7278 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7279 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7280 IEM_MC_ARG(uint32_t, u32Src, 2);
7281 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7282
7283 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7284 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7285 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7286 IEM_MC_REF_EFLAGS(pEFlags);
7287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7288 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7289 else
7290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7291
7292 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7293 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7294 IEM_MC_ADVANCE_RIP();
7295 IEM_MC_END();
7296 return VINF_SUCCESS;
7297
7298 case IEMMODE_64BIT:
7299 IEM_MC_BEGIN(4, 0);
7300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7301 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7302#ifdef RT_ARCH_X86
7303 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7304#else
7305 IEM_MC_ARG(uint64_t, u64Src, 2);
7306#endif
7307 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7308
7309 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7310 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7311 IEM_MC_REF_EFLAGS(pEFlags);
7312#ifdef RT_ARCH_X86
7313 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7314 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7315 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7316 else
7317 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7318#else
7319 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7320 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7321 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7322 else
7323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7324#endif
7325
7326 IEM_MC_ADVANCE_RIP();
7327 IEM_MC_END();
7328 return VINF_SUCCESS;
7329
7330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7331 }
7332 }
7333 else
7334 {
7335 switch (pVCpu->iem.s.enmEffOpSize)
7336 {
7337 case IEMMODE_16BIT:
7338 IEM_MC_BEGIN(4, 3);
7339 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7340 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7341 IEM_MC_ARG(uint16_t, u16Src, 2);
7342 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7344 IEM_MC_LOCAL(uint16_t, u16Ax);
7345
7346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7347 IEMOP_HLP_DONE_DECODING();
7348 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7349 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7350 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7351 IEM_MC_FETCH_EFLAGS(EFlags);
7352 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7353 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7354 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7355 else
7356 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7357
7358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7359 IEM_MC_COMMIT_EFLAGS(EFlags);
7360 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7361 IEM_MC_ADVANCE_RIP();
7362 IEM_MC_END();
7363 return VINF_SUCCESS;
7364
7365 case IEMMODE_32BIT:
7366 IEM_MC_BEGIN(4, 3);
7367 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7368 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7369 IEM_MC_ARG(uint32_t, u32Src, 2);
7370 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7372 IEM_MC_LOCAL(uint32_t, u32Eax);
7373
7374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7375 IEMOP_HLP_DONE_DECODING();
7376 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7377 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7378 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7379 IEM_MC_FETCH_EFLAGS(EFlags);
7380 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7381 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7382 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7383 else
7384 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7385
7386 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7387 IEM_MC_COMMIT_EFLAGS(EFlags);
7388 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7389 IEM_MC_ADVANCE_RIP();
7390 IEM_MC_END();
7391 return VINF_SUCCESS;
7392
7393 case IEMMODE_64BIT:
7394 IEM_MC_BEGIN(4, 3);
7395 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7396 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7397#ifdef RT_ARCH_X86
7398 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7399#else
7400 IEM_MC_ARG(uint64_t, u64Src, 2);
7401#endif
7402 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7404 IEM_MC_LOCAL(uint64_t, u64Rax);
7405
7406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7407 IEMOP_HLP_DONE_DECODING();
7408 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7409 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7410 IEM_MC_FETCH_EFLAGS(EFlags);
7411 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7412#ifdef RT_ARCH_X86
7413 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7414 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7415 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7416 else
7417 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7418#else
7419 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7420 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7421 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7422 else
7423 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7424#endif
7425
7426 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7427 IEM_MC_COMMIT_EFLAGS(EFlags);
7428 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7429 IEM_MC_ADVANCE_RIP();
7430 IEM_MC_END();
7431 return VINF_SUCCESS;
7432
7433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7434 }
7435 }
7436}
7437
7438
7439FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7440{
7441 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7442 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7443
7444 switch (pVCpu->iem.s.enmEffOpSize)
7445 {
7446 case IEMMODE_16BIT:
7447 IEM_MC_BEGIN(5, 1);
7448 IEM_MC_ARG(uint16_t, uSel, 0);
7449 IEM_MC_ARG(uint16_t, offSeg, 1);
7450 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7451 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7452 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7453 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7456 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7457 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7458 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7459 IEM_MC_END();
7460 return VINF_SUCCESS;
7461
7462 case IEMMODE_32BIT:
7463 IEM_MC_BEGIN(5, 1);
7464 IEM_MC_ARG(uint16_t, uSel, 0);
7465 IEM_MC_ARG(uint32_t, offSeg, 1);
7466 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7467 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7468 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7469 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7472 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7473 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7474 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7475 IEM_MC_END();
7476 return VINF_SUCCESS;
7477
7478 case IEMMODE_64BIT:
7479 IEM_MC_BEGIN(5, 1);
7480 IEM_MC_ARG(uint16_t, uSel, 0);
7481 IEM_MC_ARG(uint64_t, offSeg, 1);
7482 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7483 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7484 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7485 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 7488 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7489 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7490 else
7491 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7492 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7493 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7494 IEM_MC_END();
7495 return VINF_SUCCESS;
7496
7497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7498 }
7499}
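
/*
 * Illustrative note on the far pointer layout consumed above (reference
 * only, not built): the offset lives at displacement 0 (2, 4 or 8 bytes
 * wide by operand size) with the 16-bit selector directly after it,
 * which is why the selector fetches use displacements 2, 4 and 8.
 * Hypothetical struct name; the o32 case is shown.
 */
#if 0
#pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t offSeg; /* fetched first, at GCPtrEff + 0 */
    uint16_t uSel;   /* fetched second, at GCPtrEff + 4 */
} IEMSKETCHFARPTR32;
#pragma pack()
#endif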
7500
7501
7502/** Opcode 0x0f 0xb2. */
7503FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7504{
7505 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7506 IEMOP_HLP_MIN_386();
7507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7509 return IEMOP_RAISE_INVALID_OPCODE();
7510 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7511}
7512
7513
7514/** Opcode 0x0f 0xb3. */
7515FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7516{
7517 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7518 IEMOP_HLP_MIN_386();
7519 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7520}
7521
7522
7523/** Opcode 0x0f 0xb4. */
7524FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7525{
7526 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7527 IEMOP_HLP_MIN_386();
7528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7529 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7530 return IEMOP_RAISE_INVALID_OPCODE();
7531 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7532}
7533
7534
7535/** Opcode 0x0f 0xb5. */
7536FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7537{
7538 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7539 IEMOP_HLP_MIN_386();
7540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7542 return IEMOP_RAISE_INVALID_OPCODE();
7543 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7544}
7545
7546
7547/** Opcode 0x0f 0xb6. */
7548FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7549{
7550 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7551 IEMOP_HLP_MIN_386();
7552
7553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7554
7555 /*
7556 * If rm is denoting a register, no more instruction bytes.
7557 */
7558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7559 {
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7561 switch (pVCpu->iem.s.enmEffOpSize)
7562 {
7563 case IEMMODE_16BIT:
7564 IEM_MC_BEGIN(0, 1);
7565 IEM_MC_LOCAL(uint16_t, u16Value);
7566 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7567 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7568 IEM_MC_ADVANCE_RIP();
7569 IEM_MC_END();
7570 return VINF_SUCCESS;
7571
7572 case IEMMODE_32BIT:
7573 IEM_MC_BEGIN(0, 1);
7574 IEM_MC_LOCAL(uint32_t, u32Value);
7575 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7576 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7577 IEM_MC_ADVANCE_RIP();
7578 IEM_MC_END();
7579 return VINF_SUCCESS;
7580
7581 case IEMMODE_64BIT:
7582 IEM_MC_BEGIN(0, 1);
7583 IEM_MC_LOCAL(uint64_t, u64Value);
7584 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7585 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7586 IEM_MC_ADVANCE_RIP();
7587 IEM_MC_END();
7588 return VINF_SUCCESS;
7589
7590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7591 }
7592 }
7593 else
7594 {
7595 /*
7596 * We're loading a register from memory.
7597 */
7598 switch (pVCpu->iem.s.enmEffOpSize)
7599 {
7600 case IEMMODE_16BIT:
7601 IEM_MC_BEGIN(0, 2);
7602 IEM_MC_LOCAL(uint16_t, u16Value);
7603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7606 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7607 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7608 IEM_MC_ADVANCE_RIP();
7609 IEM_MC_END();
7610 return VINF_SUCCESS;
7611
7612 case IEMMODE_32BIT:
7613 IEM_MC_BEGIN(0, 2);
7614 IEM_MC_LOCAL(uint32_t, u32Value);
7615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7619 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7620 IEM_MC_ADVANCE_RIP();
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623
7624 case IEMMODE_64BIT:
7625 IEM_MC_BEGIN(0, 2);
7626 IEM_MC_LOCAL(uint64_t, u64Value);
7627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7630 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7631 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7632 IEM_MC_ADVANCE_RIP();
7633 IEM_MC_END();
7634 return VINF_SUCCESS;
7635
7636 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7637 }
7638 }
7639}
7640
7641
7642/** Opcode 0x0f 0xb7. */
7643FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7644{
7645 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7646 IEMOP_HLP_MIN_386();
7647
7648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7649
7650 /** @todo Not entirely sure how the operand size prefix is handled here,
7651 * assuming that it will be ignored. Would be nice to have a few
 7652 * tests for this. */
7653 /*
7654 * If rm is denoting a register, no more instruction bytes.
7655 */
7656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7657 {
7658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7659 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7660 {
7661 IEM_MC_BEGIN(0, 1);
7662 IEM_MC_LOCAL(uint32_t, u32Value);
7663 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7664 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7665 IEM_MC_ADVANCE_RIP();
7666 IEM_MC_END();
7667 }
7668 else
7669 {
7670 IEM_MC_BEGIN(0, 1);
7671 IEM_MC_LOCAL(uint64_t, u64Value);
7672 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7673 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7674 IEM_MC_ADVANCE_RIP();
7675 IEM_MC_END();
7676 }
7677 }
7678 else
7679 {
7680 /*
7681 * We're loading a register from memory.
7682 */
7683 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7684 {
7685 IEM_MC_BEGIN(0, 2);
7686 IEM_MC_LOCAL(uint32_t, u32Value);
7687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7690 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7691 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7692 IEM_MC_ADVANCE_RIP();
7693 IEM_MC_END();
7694 }
7695 else
7696 {
7697 IEM_MC_BEGIN(0, 2);
7698 IEM_MC_LOCAL(uint64_t, u64Value);
7699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7703 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7704 IEM_MC_ADVANCE_RIP();
7705 IEM_MC_END();
7706 }
7707 }
7708 return VINF_SUCCESS;
7709}
7710
7711
7712/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7713FNIEMOP_UD_STUB(iemOp_jmpe);
7714/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7715FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7716
7717
7718/**
7719 * @opcode 0xb9
7720 * @opinvalid intel-modrm
7721 * @optest ->
7722 */
7723FNIEMOP_DEF(iemOp_Grp10)
7724{
7725 /*
 7726 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
 7727 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7728 */
7729 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7730 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7731 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7732}
7733
7734
7735/** Opcode 0x0f 0xba. */
7736FNIEMOP_DEF(iemOp_Grp8)
7737{
7738 IEMOP_HLP_MIN_386();
7739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7740 PCIEMOPBINSIZES pImpl;
7741 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7742 {
7743 case 0: case 1: case 2: case 3:
7744 /* Both AMD and Intel want full modr/m decoding and imm8. */
7745 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7746 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7747 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7748 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7749 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7751 }
7752 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7753
7754 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7755 {
7756 /* register destination. */
7757 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759
7760 switch (pVCpu->iem.s.enmEffOpSize)
7761 {
7762 case IEMMODE_16BIT:
7763 IEM_MC_BEGIN(3, 0);
7764 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7765 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7766 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7767
7768 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7769 IEM_MC_REF_EFLAGS(pEFlags);
7770 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7771
7772 IEM_MC_ADVANCE_RIP();
7773 IEM_MC_END();
7774 return VINF_SUCCESS;
7775
7776 case IEMMODE_32BIT:
7777 IEM_MC_BEGIN(3, 0);
7778 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7779 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7780 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7781
7782 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7783 IEM_MC_REF_EFLAGS(pEFlags);
7784 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7785
7786 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7787 IEM_MC_ADVANCE_RIP();
7788 IEM_MC_END();
7789 return VINF_SUCCESS;
7790
7791 case IEMMODE_64BIT:
7792 IEM_MC_BEGIN(3, 0);
7793 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7794 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7795 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7796
7797 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7798 IEM_MC_REF_EFLAGS(pEFlags);
7799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7800
7801 IEM_MC_ADVANCE_RIP();
7802 IEM_MC_END();
7803 return VINF_SUCCESS;
7804
7805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7806 }
7807 }
7808 else
7809 {
7810 /* memory destination. */
7811
7812 uint32_t fAccess;
7813 if (pImpl->pfnLockedU16)
7814 fAccess = IEM_ACCESS_DATA_RW;
7815 else /* BT */
7816 fAccess = IEM_ACCESS_DATA_R;
7817
7818 /** @todo test negative bit offsets! */
7819 switch (pVCpu->iem.s.enmEffOpSize)
7820 {
7821 case IEMMODE_16BIT:
7822 IEM_MC_BEGIN(3, 1);
7823 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7824 IEM_MC_ARG(uint16_t, u16Src, 1);
7825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7827
7828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7829 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7830 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7831 if (pImpl->pfnLockedU16)
7832 IEMOP_HLP_DONE_DECODING();
7833 else
7834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7835 IEM_MC_FETCH_EFLAGS(EFlags);
7836 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7837 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7839 else
7840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7841 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7842
7843 IEM_MC_COMMIT_EFLAGS(EFlags);
7844 IEM_MC_ADVANCE_RIP();
7845 IEM_MC_END();
7846 return VINF_SUCCESS;
7847
7848 case IEMMODE_32BIT:
7849 IEM_MC_BEGIN(3, 1);
7850 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7851 IEM_MC_ARG(uint32_t, u32Src, 1);
7852 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7854
7855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7856 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7857 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7858 if (pImpl->pfnLockedU16)
7859 IEMOP_HLP_DONE_DECODING();
7860 else
7861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7862 IEM_MC_FETCH_EFLAGS(EFlags);
7863 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7864 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7865 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7866 else
7867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7868 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7869
7870 IEM_MC_COMMIT_EFLAGS(EFlags);
7871 IEM_MC_ADVANCE_RIP();
7872 IEM_MC_END();
7873 return VINF_SUCCESS;
7874
7875 case IEMMODE_64BIT:
7876 IEM_MC_BEGIN(3, 1);
7877 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7878 IEM_MC_ARG(uint64_t, u64Src, 1);
7879 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7881
7882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7883 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7884 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7885 if (pImpl->pfnLockedU16)
7886 IEMOP_HLP_DONE_DECODING();
7887 else
7888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7889 IEM_MC_FETCH_EFLAGS(EFlags);
7890 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7891 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7892 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7893 else
7894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7895 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7896
7897 IEM_MC_COMMIT_EFLAGS(EFlags);
7898 IEM_MC_ADVANCE_RIP();
7899 IEM_MC_END();
7900 return VINF_SUCCESS;
7901
7902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7903 }
7904 }
7905}
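
/*
 * Illustrative sketch of what the bt/bts/btr/btc imm8 workers above do
 * for the 32-bit case (reference only, not built).  The immediate bit
 * offset is truncated to the operand width (the "u8Bit & 0x1f" above),
 * CF receives the selected bit, and bts/btr/btc then set/clear/toggle
 * it while plain bt leaves the destination untouched.  Hypothetical name.
 */
#if 0
static void iemSketchBtcU32(uint32_t *puDst, uint32_t uBitNo, uint32_t *pfEFlags)
{
    uint32_t const fMask = RT_BIT_32(uBitNo & 31);
    if (*puDst & fMask)
        *pfEFlags |= X86_EFL_CF;
    else
        *pfEFlags &= ~X86_EFL_CF;
    *puDst ^= fMask; /* btc toggles; bts would OR in fMask, btr would AND with ~fMask */
}
#endif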
7906
7907
7908/** Opcode 0x0f 0xbb. */
7909FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7910{
7911 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7912 IEMOP_HLP_MIN_386();
7913 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7914}
7915
7916
7917/** Opcode 0x0f 0xbc. */
7918FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7919{
7920 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7921 IEMOP_HLP_MIN_386();
7922 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7923 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7924}
7925
7926
7927/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7928FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7929
7930
7931/** Opcode 0x0f 0xbd. */
7932FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7933{
7934 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7935 IEMOP_HLP_MIN_386();
7936 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7937 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7938}
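
/*
 * Illustrative sketch of the bsf/bsr semantics implemented by the
 * workers above (reference only, not built).  A zero source sets ZF;
 * Intel documents the destination as undefined in that case while AMD
 * documents it as unchanged.  Otherwise ZF is cleared and the index of
 * the lowest (bsf) or highest (bsr) set bit is stored.  Hypothetical
 * name; the bsf variant is shown.
 */
#if 0
static void iemSketchBsfU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF; /* destination left alone here (AMD behaviour) */
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        *puDst = ASMBitFirstSetU32(uSrc) - 1; /* IPRT helper returns a 1-based index */
    }
}
#endif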
7939
7940
7941/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7942FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7943
7944
7945/** Opcode 0x0f 0xbe. */
7946FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7947{
7948 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7949 IEMOP_HLP_MIN_386();
7950
7951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7952
7953 /*
7954 * If rm is denoting a register, no more instruction bytes.
7955 */
7956 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7957 {
7958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7959 switch (pVCpu->iem.s.enmEffOpSize)
7960 {
7961 case IEMMODE_16BIT:
7962 IEM_MC_BEGIN(0, 1);
7963 IEM_MC_LOCAL(uint16_t, u16Value);
7964 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7965 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7966 IEM_MC_ADVANCE_RIP();
7967 IEM_MC_END();
7968 return VINF_SUCCESS;
7969
7970 case IEMMODE_32BIT:
7971 IEM_MC_BEGIN(0, 1);
7972 IEM_MC_LOCAL(uint32_t, u32Value);
7973 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7974 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7975 IEM_MC_ADVANCE_RIP();
7976 IEM_MC_END();
7977 return VINF_SUCCESS;
7978
7979 case IEMMODE_64BIT:
7980 IEM_MC_BEGIN(0, 1);
7981 IEM_MC_LOCAL(uint64_t, u64Value);
7982 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7983 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7984 IEM_MC_ADVANCE_RIP();
7985 IEM_MC_END();
7986 return VINF_SUCCESS;
7987
7988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7989 }
7990 }
7991 else
7992 {
7993 /*
7994 * We're loading a register from memory.
7995 */
7996 switch (pVCpu->iem.s.enmEffOpSize)
7997 {
7998 case IEMMODE_16BIT:
7999 IEM_MC_BEGIN(0, 2);
8000 IEM_MC_LOCAL(uint16_t, u16Value);
8001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8004 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8005 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8006 IEM_MC_ADVANCE_RIP();
8007 IEM_MC_END();
8008 return VINF_SUCCESS;
8009
8010 case IEMMODE_32BIT:
8011 IEM_MC_BEGIN(0, 2);
8012 IEM_MC_LOCAL(uint32_t, u32Value);
8013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8016 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8017 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8018 IEM_MC_ADVANCE_RIP();
8019 IEM_MC_END();
8020 return VINF_SUCCESS;
8021
8022 case IEMMODE_64BIT:
8023 IEM_MC_BEGIN(0, 2);
8024 IEM_MC_LOCAL(uint64_t, u64Value);
8025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8028 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8029 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8030 IEM_MC_ADVANCE_RIP();
8031 IEM_MC_END();
8032 return VINF_SUCCESS;
8033
8034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8035 }
8036 }
8037}
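
/*
 * Illustrative sketch of the only difference between the movzx and
 * movsx families above (reference only, not built): the extension
 * applied to the narrow source value.  Hypothetical names.
 */
#if 0
static uint32_t iemSketchZeroExtendU8(uint8_t u8) { return u8; }
static uint32_t iemSketchSignExtendU8(uint8_t u8) { return (uint32_t)(int32_t)(int8_t)u8; }
#endif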
8038
8039
8040/** Opcode 0x0f 0xbf. */
8041FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8042{
8043 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8044 IEMOP_HLP_MIN_386();
8045
8046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8047
8048 /** @todo Not entirely sure how the operand size prefix is handled here,
8049 * assuming that it will be ignored. Would be nice to have a few
 8050 * tests for this. */
8051 /*
8052 * If rm is denoting a register, no more instruction bytes.
8053 */
8054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8055 {
8056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8057 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8058 {
8059 IEM_MC_BEGIN(0, 1);
8060 IEM_MC_LOCAL(uint32_t, u32Value);
8061 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8062 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8063 IEM_MC_ADVANCE_RIP();
8064 IEM_MC_END();
8065 }
8066 else
8067 {
8068 IEM_MC_BEGIN(0, 1);
8069 IEM_MC_LOCAL(uint64_t, u64Value);
8070 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8071 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8072 IEM_MC_ADVANCE_RIP();
8073 IEM_MC_END();
8074 }
8075 }
8076 else
8077 {
8078 /*
8079 * We're loading a register from memory.
8080 */
8081 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8082 {
8083 IEM_MC_BEGIN(0, 2);
8084 IEM_MC_LOCAL(uint32_t, u32Value);
8085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8088 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8089 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8090 IEM_MC_ADVANCE_RIP();
8091 IEM_MC_END();
8092 }
8093 else
8094 {
8095 IEM_MC_BEGIN(0, 2);
8096 IEM_MC_LOCAL(uint64_t, u64Value);
8097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8100 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8101 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8102 IEM_MC_ADVANCE_RIP();
8103 IEM_MC_END();
8104 }
8105 }
8106 return VINF_SUCCESS;
8107}
8108
8109
8110/** Opcode 0x0f 0xc0. */
8111FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8112{
8113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8114 IEMOP_HLP_MIN_486();
8115 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8116
8117 /*
8118 * If rm is denoting a register, no more instruction bytes.
8119 */
8120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8121 {
8122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8123
8124 IEM_MC_BEGIN(3, 0);
8125 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8126 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8127 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8128
8129 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8130 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8131 IEM_MC_REF_EFLAGS(pEFlags);
8132 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8133
8134 IEM_MC_ADVANCE_RIP();
8135 IEM_MC_END();
8136 }
8137 else
8138 {
8139 /*
8140 * We're accessing memory.
8141 */
8142 IEM_MC_BEGIN(3, 3);
8143 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8144 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8145 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8146 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8148
8149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8150 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8151 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8152 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8153 IEM_MC_FETCH_EFLAGS(EFlags);
8154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8155 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8156 else
8157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8158
8159 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8160 IEM_MC_COMMIT_EFLAGS(EFlags);
8161 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8162 IEM_MC_ADVANCE_RIP();
8163 IEM_MC_END();
8164 return VINF_SUCCESS;
8165 }
8166 return VINF_SUCCESS;
8167}
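
/*
 * Illustrative sketch of the xadd workers used above (reference only,
 * not built): XADD stores the sum in the destination and the old
 * destination value in the source register, i.e. an exchange plus add.
 * Hypothetical name; the real workers also set the ADD flags.
 */
#if 0
static void iemSketchXAddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = (uint8_t)(uOldDst + *puReg); /* sum -> destination */
    *puReg = uOldDst;                     /* old destination -> register operand */
}
#endif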
8168
8169
8170/** Opcode 0x0f 0xc1. */
8171FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8172{
8173 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8174 IEMOP_HLP_MIN_486();
8175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8176
8177 /*
8178 * If rm is denoting a register, no more instruction bytes.
8179 */
8180 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8181 {
8182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8183
8184 switch (pVCpu->iem.s.enmEffOpSize)
8185 {
8186 case IEMMODE_16BIT:
8187 IEM_MC_BEGIN(3, 0);
8188 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8189 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8191
8192 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8193 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8194 IEM_MC_REF_EFLAGS(pEFlags);
8195 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8196
8197 IEM_MC_ADVANCE_RIP();
8198 IEM_MC_END();
8199 return VINF_SUCCESS;
8200
8201 case IEMMODE_32BIT:
8202 IEM_MC_BEGIN(3, 0);
8203 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8204 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8206
8207 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8208 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8209 IEM_MC_REF_EFLAGS(pEFlags);
8210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8211
8212 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8213 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8214 IEM_MC_ADVANCE_RIP();
8215 IEM_MC_END();
8216 return VINF_SUCCESS;
8217
8218 case IEMMODE_64BIT:
8219 IEM_MC_BEGIN(3, 0);
8220 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8221 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8222 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8223
8224 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8225 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8226 IEM_MC_REF_EFLAGS(pEFlags);
8227 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8228
8229 IEM_MC_ADVANCE_RIP();
8230 IEM_MC_END();
8231 return VINF_SUCCESS;
8232
8233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8234 }
8235 }
8236 else
8237 {
8238 /*
8239 * We're accessing memory.
8240 */
8241 switch (pVCpu->iem.s.enmEffOpSize)
8242 {
8243 case IEMMODE_16BIT:
8244 IEM_MC_BEGIN(3, 3);
8245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8246 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8247 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8248 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8250
8251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8252 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8253 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8254 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8255 IEM_MC_FETCH_EFLAGS(EFlags);
8256 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8257 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8258 else
8259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8260
8261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8262 IEM_MC_COMMIT_EFLAGS(EFlags);
8263 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8264 IEM_MC_ADVANCE_RIP();
8265 IEM_MC_END();
8266 return VINF_SUCCESS;
8267
8268 case IEMMODE_32BIT:
8269 IEM_MC_BEGIN(3, 3);
8270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8271 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8272 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8273 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8275
8276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8277 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8278 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8279 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8280 IEM_MC_FETCH_EFLAGS(EFlags);
8281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8283 else
8284 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8285
8286 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8287 IEM_MC_COMMIT_EFLAGS(EFlags);
8288 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8289 IEM_MC_ADVANCE_RIP();
8290 IEM_MC_END();
8291 return VINF_SUCCESS;
8292
8293 case IEMMODE_64BIT:
8294 IEM_MC_BEGIN(3, 3);
8295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8296 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8297 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8298 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8300
8301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8302 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8303 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8304 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8305 IEM_MC_FETCH_EFLAGS(EFlags);
8306 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8307 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8308 else
8309 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8310
8311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8312 IEM_MC_COMMIT_EFLAGS(EFlags);
8313 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8314 IEM_MC_ADVANCE_RIP();
8315 IEM_MC_END();
8316 return VINF_SUCCESS;
8317
8318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8319 }
8320 }
8321}
8322
8323
8324/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8325FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8326/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8327FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8328/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8329FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8330/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8331FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8332
8333
8334/** Opcode 0x0f 0xc3. */
8335FNIEMOP_DEF(iemOp_movnti_My_Gy)
8336{
8337 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8338
8339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8340
8341 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8342 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8343 {
8344 switch (pVCpu->iem.s.enmEffOpSize)
8345 {
8346 case IEMMODE_32BIT:
8347 IEM_MC_BEGIN(0, 2);
8348 IEM_MC_LOCAL(uint32_t, u32Value);
8349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8350
8351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8353 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8354 return IEMOP_RAISE_INVALID_OPCODE();
8355
8356 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8357 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8358 IEM_MC_ADVANCE_RIP();
8359 IEM_MC_END();
8360 break;
8361
8362 case IEMMODE_64BIT:
8363 IEM_MC_BEGIN(0, 2);
8364 IEM_MC_LOCAL(uint64_t, u64Value);
8365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8366
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8370 return IEMOP_RAISE_INVALID_OPCODE();
8371
8372 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8373 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8374 IEM_MC_ADVANCE_RIP();
8375 IEM_MC_END();
8376 break;
8377
8378 case IEMMODE_16BIT:
8379 /** @todo check this form. */
8380 return IEMOP_RAISE_INVALID_OPCODE();
8381 }
8382 }
8383 else
8384 return IEMOP_RAISE_INVALID_OPCODE();
8385 return VINF_SUCCESS;
8386}
8387/* Opcode 0x66 0x0f 0xc3 - invalid */
8388/* Opcode 0xf3 0x0f 0xc3 - invalid */
8389/* Opcode 0xf2 0x0f 0xc3 - invalid */
8390
8391/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8392FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8393/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8394FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8395/* Opcode 0xf3 0x0f 0xc4 - invalid */
8396/* Opcode 0xf2 0x0f 0xc4 - invalid */
8397
8398/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8399FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8400/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8401FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8402/* Opcode 0xf3 0x0f 0xc5 - invalid */
8403/* Opcode 0xf2 0x0f 0xc5 - invalid */
8404
8405/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8406FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8407/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8408FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8409/* Opcode 0xf3 0x0f 0xc6 - invalid */
8410/* Opcode 0xf2 0x0f 0xc6 - invalid */
8411
8412
8413/** Opcode 0x0f 0xc7 !11/1. */
8414FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8415{
8416 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8417
8418 IEM_MC_BEGIN(4, 3);
8419 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8420 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8421 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8422 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8423 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8424 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8426
8427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8428 IEMOP_HLP_DONE_DECODING();
8429 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8430
8431 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8432 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8433 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8434
8435 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8436 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8437 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8438
8439 IEM_MC_FETCH_EFLAGS(EFlags);
8440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8441 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8442 else
8443 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8444
8445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8446 IEM_MC_COMMIT_EFLAGS(EFlags);
8447 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8448 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8449 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8450 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8451 IEM_MC_ENDIF();
8452 IEM_MC_ADVANCE_RIP();
8453
8454 IEM_MC_END();
8455 return VINF_SUCCESS;
8456}
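
/*
 * Illustrative sketch of the cmpxchg8b semantics implemented above
 * (reference only, not built): EDX:EAX is compared with the 64-bit
 * memory operand; on a match ZF is set and ECX:EBX is stored, otherwise
 * ZF is cleared and the memory value is loaded into EDX:EAX.  This is
 * the pu64EaxEdx / pu64EbxEcx pairing above.  Hypothetical name.
 */
#if 0
static void iemSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pfEFlags |= X86_EFL_ZF;
        *pu64Mem = u64EbxEcx;
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        *pu64EaxEdx = *pu64Mem;
    }
}
#endif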
8457
8458
8459/** Opcode REX.W 0x0f 0xc7 !11/1. */
8460FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8461{
8462 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8463 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8464 {
8465#if 0
8466 RT_NOREF(bRm);
8467 IEMOP_BITCH_ABOUT_STUB();
8468 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8469#else
8470 IEM_MC_BEGIN(4, 3);
8471 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8472 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8473 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8474 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8475 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8476 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8478
8479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8480 IEMOP_HLP_DONE_DECODING();
8481 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8482 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8483
8484 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8485 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8486 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8487
8488 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8489 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8490 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8491
8492 IEM_MC_FETCH_EFLAGS(EFlags);
8493# ifdef RT_ARCH_AMD64
8494 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8495 {
8496 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8497 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8498 else
8499 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8500 }
8501 else
8502# endif
8503 {
 8504 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
 8505 accesses that are not all atomic, which works fine in a uni-CPU guest
 8506 configuration (ignoring DMA). If guest SMP is active we have no choice
 8507 but to use a rendezvous callback here. Sigh. */
8508 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8509 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8510 else
8511 {
8512 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8513 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8514 }
8515 }
8516
8517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8518 IEM_MC_COMMIT_EFLAGS(EFlags);
8519 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8520 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8521 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8522 IEM_MC_ENDIF();
8523 IEM_MC_ADVANCE_RIP();
8524
8525 IEM_MC_END();
8526 return VINF_SUCCESS;
8527#endif
8528 }
8529 Log(("cmpxchg16b -> #UD\n"));
8530 return IEMOP_RAISE_INVALID_OPCODE();
8531}
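
/*
 * Illustrative note (reference only, not built): unlike cmpxchg8b, the
 * cmpxchg16b operand must be 16-byte aligned or #GP(0) is raised, which
 * is what the IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED check above
 * enforces.  Hypothetical name.
 */
#if 0
static bool iemSketchIsCmpXchg16bOperandAligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* misaligned operands raise #GP(0) */
}
#endif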
8532
8533FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8534{
8535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8536 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8537 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8538}
8539
8540/** Opcode 0x0f 0xc7 11/6. */
8541FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8542
8543/** Opcode 0x0f 0xc7 !11/6. */
8544FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8545
8546/** Opcode 0x66 0x0f 0xc7 !11/6. */
8547FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8548
8549/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8550FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8551
8552/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8553FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8554
8555/** Opcode 0x0f 0xc7 11/7. */
8556FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8557
8558
8559/**
8560 * Group 9 jump table for register variant.
8561 */
8562IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8563{ /* pfx: none, 066h, 0f3h, 0f2h */
8564 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8565 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8566 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8567 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8568 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8569 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8570 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8571 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8572};
8573AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8574
8575
8576/**
8577 * Group 9 jump table for memory variant.
8578 */
8579IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8580{ /* pfx: none, 066h, 0f3h, 0f2h */
8581 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8582 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8583 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8584 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8585 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8586 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8587 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8588 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8589};
8590AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8591
8592
8593/** Opcode 0x0f 0xc7. */
8594FNIEMOP_DEF(iemOp_Grp9)
8595{
8596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8598 /* register, register */
8599 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8600 + pVCpu->iem.s.idxPrefix], bRm);
8601 /* memory, register */
8602 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8603 + pVCpu->iem.s.idxPrefix], bRm);
8604}
8605
8606
8607/**
8608 * Common 'bswap register' helper.
8609 */
8610FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8611{
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 switch (pVCpu->iem.s.enmEffOpSize)
8614 {
8615 case IEMMODE_16BIT:
8616 IEM_MC_BEGIN(1, 0);
8617 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8618 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8619 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8620 IEM_MC_ADVANCE_RIP();
8621 IEM_MC_END();
8622 return VINF_SUCCESS;
8623
8624 case IEMMODE_32BIT:
8625 IEM_MC_BEGIN(1, 0);
8626 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8627 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8628 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8629 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8630 IEM_MC_ADVANCE_RIP();
8631 IEM_MC_END();
8632 return VINF_SUCCESS;
8633
8634 case IEMMODE_64BIT:
8635 IEM_MC_BEGIN(1, 0);
8636 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8637 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8638 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8639 IEM_MC_ADVANCE_RIP();
8640 IEM_MC_END();
8641 return VINF_SUCCESS;
8642
8643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8644 }
8645}
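
/*
 * Illustrative sketch of the 32-bit bswap worker used above (reference
 * only, not built).  The o16 form is architecturally undefined, which
 * is why it gets a dedicated u16 worker above instead of this pattern.
 * Hypothetical name.
 */
#if 0
static uint32_t iemSketchBSwapU32(uint32_t u32)
{
    return ((u32 & UINT32_C(0x000000ff)) << 24)
         | ((u32 & UINT32_C(0x0000ff00)) <<  8)
         | ((u32 & UINT32_C(0x00ff0000)) >>  8)
         | ((u32 & UINT32_C(0xff000000)) >> 24);
}
#endif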
8646
8647
8648/** Opcode 0x0f 0xc8. */
8649FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8650{
8651 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
 8652 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
 8653 prefix, but it appears REX.B is the correct one. For a parallel
 8654 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8655 IEMOP_HLP_MIN_486();
8656 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8657}
8658
8659
8660/** Opcode 0x0f 0xc9. */
8661FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8662{
8663 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8664 IEMOP_HLP_MIN_486();
8665 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8666}
8667
8668
8669/** Opcode 0x0f 0xca. */
8670FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8671{
 8672 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8673 IEMOP_HLP_MIN_486();
8674 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8675}
8676
8677
8678/** Opcode 0x0f 0xcb. */
8679FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8680{
 8681 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8682 IEMOP_HLP_MIN_486();
8683 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8684}
8685
8686
8687/** Opcode 0x0f 0xcc. */
8688FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8689{
8690 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8691 IEMOP_HLP_MIN_486();
8692 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8693}
8694
8695
8696/** Opcode 0x0f 0xcd. */
8697FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8698{
8699 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8700 IEMOP_HLP_MIN_486();
8701 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8702}
8703
8704
8705/** Opcode 0x0f 0xce. */
8706FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8707{
8708 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8709 IEMOP_HLP_MIN_486();
8710 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8711}
8712
8713
8714/** Opcode 0x0f 0xcf. */
8715FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8716{
8717 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8718 IEMOP_HLP_MIN_486();
8719 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8720}
8721
8722
8723/* Opcode 0x0f 0xd0 - invalid */
8724/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8725FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8726/* Opcode 0xf3 0x0f 0xd0 - invalid */
8727/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8728FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8729
8730/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8731FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8732/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8733FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8734/* Opcode 0xf3 0x0f 0xd1 - invalid */
8735/* Opcode 0xf2 0x0f 0xd1 - invalid */
8736
8737/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8738FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8739/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8740FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8741/* Opcode 0xf3 0x0f 0xd2 - invalid */
8742/* Opcode 0xf2 0x0f 0xd2 - invalid */
8743
8744/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8745FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8746/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8747FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8748/* Opcode 0xf3 0x0f 0xd3 - invalid */
8749/* Opcode 0xf2 0x0f 0xd3 - invalid */
8750
8751/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8752FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8753/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8754FNIEMOP_STUB(iemOp_paddq_Vx_W);
8755/* Opcode 0xf3 0x0f 0xd4 - invalid */
8756/* Opcode 0xf2 0x0f 0xd4 - invalid */
8757
8758/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8759FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8760/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8761FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8762/* Opcode 0xf3 0x0f 0xd5 - invalid */
8763/* Opcode 0xf2 0x0f 0xd5 - invalid */
8764
8765/* Opcode 0x0f 0xd6 - invalid */
8766
8767/**
8768 * @opcode 0xd6
8769 * @oppfx 0x66
8770 * @opcpuid sse2
8771 * @opgroup og_sse2_pcksclr_datamove
8772 * @opxcpttype none
8773 * @optest op1=-1 op2=2 -> op1=2
8774 * @optest op1=0 op2=-42 -> op1=-42
8775 */
8776FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8777{
8778 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8781 {
8782 /*
8783 * Register, register.
8784 */
8785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8786 IEM_MC_BEGIN(0, 2);
8787 IEM_MC_LOCAL(uint64_t, uSrc);
8788
8789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8790 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8791
8792 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8793 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8794
8795 IEM_MC_ADVANCE_RIP();
8796 IEM_MC_END();
8797 }
8798 else
8799 {
8800 /*
8801 * Memory, register.
8802 */
8803 IEM_MC_BEGIN(0, 2);
8804 IEM_MC_LOCAL(uint64_t, uSrc);
8805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8806
8807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8811
8812 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8814
8815 IEM_MC_ADVANCE_RIP();
8816 IEM_MC_END();
8817 }
8818 return VINF_SUCCESS;
8819}
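
/*
 * Illustrative sketch only: in the register,register form above the 64-bit
 * source is stored with zero extension into the full 128-bit destination
 * (IEM_MC_STORE_XREG_U64_ZX_U128), i.e. roughly this, with made-up names:
 */
#if 0 /* illustration */
static void iemIllustrateMovqZx(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc; /* low quadword receives the source */
    puDst->au64[1] = 0;    /* high quadword is zeroed */
}
#endif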
8820
8821
8822/**
8823 * @opcode 0xd6
8824 * @opcodesub 11 mr/reg
8825 * @oppfx f3
8826 * @opcpuid sse2
8827 * @opgroup og_sse2_simdint_datamove
8828 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8829 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8830 */
8831FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8832{
8833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8834 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8835 {
8836 /*
8837 * Register, register.
8838 */
8839 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8841 IEM_MC_BEGIN(0, 1);
8842 IEM_MC_LOCAL(uint64_t, uSrc);
8843
8844 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8845 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8846
8847 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8848 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8849 IEM_MC_FPU_TO_MMX_MODE();
8850
8851 IEM_MC_ADVANCE_RIP();
8852 IEM_MC_END();
8853 return VINF_SUCCESS;
8854 }
8855
8856 /**
8857 * @opdone
8858 * @opmnemonic udf30fd6mem
8859 * @opcode 0xd6
8860 * @opcodesub !11 mr/reg
8861 * @oppfx f3
8862 * @opunused intel-modrm
8863 * @opcpuid sse
8864 * @optest ->
8865 */
8866 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8867}
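
/*
 * Note (illustration): the ftw=0xff in the @optest specs above and below
 * stems from entering MMX mode; IEM_MC_FPU_TO_MMX_MODE is expected to mark
 * all x87 registers valid in the abridged tag word and clear the TOP field,
 * roughly:
 *
 *     pFpuCtx->FTW  = 0xff;               // all registers valid
 *     pFpuCtx->FSW &= ~X86_FSW_TOP_MASK;  // TOP = 0
 */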
8868
8869
8870/**
8871 * @opcode 0xd6
8872 * @opcodesub 11 mr/reg
8873 * @oppfx f2
8874 * @opcpuid sse2
8875 * @opgroup og_sse2_simdint_datamove
8876 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8877 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8878 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8879 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8880 * @optest op1=-42 op2=0xfedcba9876543210
8881 * -> op1=0xfedcba9876543210 ftw=0xff
8882 */
8883FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8884{
8885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8886 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8887 {
8888 /*
8889 * Register, register.
8890 */
8891 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8893 IEM_MC_BEGIN(0, 1);
8894 IEM_MC_LOCAL(uint64_t, uSrc);
8895
8896 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8897 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8898
8899 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8900 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8901 IEM_MC_FPU_TO_MMX_MODE();
8902
8903 IEM_MC_ADVANCE_RIP();
8904 IEM_MC_END();
8905 return VINF_SUCCESS;
8906 }
8907
8908 /**
8909 * @opdone
8910 * @opmnemonic udf20fd6mem
8911 * @opcode 0xd6
8912 * @opcodesub !11 mr/reg
8913 * @oppfx f2
8914 * @opunused intel-modrm
8915 * @opcpuid sse
8916 * @optest ->
8917 */
8918 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8919}
8920
8921/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8922FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8923{
8924 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8925 /** @todo testcase: Check that the instruction implicitly clears the high
8926 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8927 * and the opcode modifications are made to work with the whole width (not
8928 * just 128). */
8929 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8930 /* Docs say register only. */
8931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8932 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8933 {
8934 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8935 IEM_MC_BEGIN(2, 0);
8936 IEM_MC_ARG(uint64_t *, pDst, 0);
8937 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8938 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8939 IEM_MC_PREPARE_FPU_USAGE();
8940 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8941 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8942 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8943 IEM_MC_ADVANCE_RIP();
8944 IEM_MC_END();
8945 return VINF_SUCCESS;
8946 }
8947 return IEMOP_RAISE_INVALID_OPCODE();
8948}
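
/*
 * Illustrative sketch only: the MMX worker called above is expected to
 * gather the most significant bit of each of the eight source bytes into
 * bits 0..7 of the destination, clearing the rest; the name is made up:
 */
#if 0 /* illustration */
static uint64_t iemIllustratePmovmskbU64(uint64_t uSrc)
{
    uint64_t uDst = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uDst |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return uDst;
}
#endif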
8949
8950/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8951FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8952{
8953 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8954 /** @todo testcase: Check that the instruction implicitly clears the high
8955 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8956 * and the opcode modifications are made to work with the whole width (not
8957 * just 128). */
8958 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8959 /* Docs say register only. */
8960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8962 {
8963 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8964 IEM_MC_BEGIN(2, 0);
8965 IEM_MC_ARG(uint64_t *, pDst, 0);
8966 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8967 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8968 IEM_MC_PREPARE_SSE_USAGE();
8969 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8970 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8971 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8972 IEM_MC_ADVANCE_RIP();
8973 IEM_MC_END();
8974 return VINF_SUCCESS;
8975 }
8976 return IEMOP_RAISE_INVALID_OPCODE();
8977}
8978
8979/* Opcode 0xf3 0x0f 0xd7 - invalid */
8980/* Opcode 0xf2 0x0f 0xd7 - invalid */
8981
8982
8983/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8984FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8985/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8986FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8987/* Opcode 0xf3 0x0f 0xd8 - invalid */
8988/* Opcode 0xf2 0x0f 0xd8 - invalid */
8989
8990/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8991FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8992/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8993FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8994/* Opcode 0xf3 0x0f 0xd9 - invalid */
8995/* Opcode 0xf2 0x0f 0xd9 - invalid */
8996
8997/** Opcode 0x0f 0xda - pminub Pq, Qq */
8998FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8999/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9000FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9001/* Opcode 0xf3 0x0f 0xda - invalid */
9002/* Opcode 0xf2 0x0f 0xda - invalid */
9003
9004/** Opcode 0x0f 0xdb - pand Pq, Qq */
9005FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9006/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9007FNIEMOP_STUB(iemOp_pand_Vx_W);
9008/* Opcode 0xf3 0x0f 0xdb - invalid */
9009/* Opcode 0xf2 0x0f 0xdb - invalid */
9010
9011/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9012FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9013/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9014FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9015/* Opcode 0xf3 0x0f 0xdc - invalid */
9016/* Opcode 0xf2 0x0f 0xdc - invalid */
9017
9018/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9019FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9020/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9021FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9022/* Opcode 0xf3 0x0f 0xdd - invalid */
9023/* Opcode 0xf2 0x0f 0xdd - invalid */
9024
9025/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9026FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9027/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9028FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9029/* Opcode 0xf3 0x0f 0xde - invalid */
9030/* Opcode 0xf2 0x0f 0xde - invalid */
9031
9032/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9033FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9034/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9035FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9036/* Opcode 0xf3 0x0f 0xdf - invalid */
9037/* Opcode 0xf2 0x0f 0xdf - invalid */
9038
9039/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9040FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9041/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9042FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9043/* Opcode 0xf3 0x0f 0xe0 - invalid */
9044/* Opcode 0xf2 0x0f 0xe0 - invalid */
9045
9046/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9047FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9048/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9049FNIEMOP_STUB(iemOp_psraw_Vx_W);
9050/* Opcode 0xf3 0x0f 0xe1 - invalid */
9051/* Opcode 0xf2 0x0f 0xe1 - invalid */
9052
9053/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9054FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9055/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9056FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9057/* Opcode 0xf3 0x0f 0xe2 - invalid */
9058/* Opcode 0xf2 0x0f 0xe2 - invalid */
9059
9060/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9061FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9062/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9063FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9064/* Opcode 0xf3 0x0f 0xe3 - invalid */
9065/* Opcode 0xf2 0x0f 0xe3 - invalid */
9066
9067/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9068FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9069/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9070FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9071/* Opcode 0xf3 0x0f 0xe4 - invalid */
9072/* Opcode 0xf2 0x0f 0xe4 - invalid */
9073
9074/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9075FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9076/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9077FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9078/* Opcode 0xf3 0x0f 0xe5 - invalid */
9079/* Opcode 0xf2 0x0f 0xe5 - invalid */
9080
9081/* Opcode 0x0f 0xe6 - invalid */
9082/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9083FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9084/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9085FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9086/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9087FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9088
9089
9090/**
9091 * @opcode 0xe7
9092 * @opcodesub !11 mr/reg
9093 * @oppfx none
9094 * @opcpuid sse
9095 * @opgroup og_sse1_cachect
9096 * @opxcpttype none
9097 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9098 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9099 */
9100FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9101{
9102 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9104 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9105 {
9106 /* Register, memory. */
9107 IEM_MC_BEGIN(0, 2);
9108 IEM_MC_LOCAL(uint64_t, uSrc);
9109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9110
9111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9113 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9114 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9115
9116 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9118 IEM_MC_FPU_TO_MMX_MODE();
9119
9120 IEM_MC_ADVANCE_RIP();
9121 IEM_MC_END();
9122 return VINF_SUCCESS;
9123 }
9124 /**
9125 * @opdone
9126 * @opmnemonic ud0fe7reg
9127 * @opcode 0xe7
9128 * @opcodesub 11 mr/reg
9129 * @oppfx none
9130 * @opunused immediate
9131 * @opcpuid sse
9132 * @optest ->
9133 */
9134 return IEMOP_RAISE_INVALID_OPCODE();
9135}
9136
9137/**
9138 * @opcode 0xe7
9139 * @opcodesub !11 mr/reg
9140 * @oppfx 0x66
9141 * @opcpuid sse2
9142 * @opgroup og_sse2_cachect
9143 * @opxcpttype 1
9144 * @optest op1=-1 op2=2 -> op1=2
9145 * @optest op1=0 op2=-42 -> op1=-42
9146 */
9147FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9148{
9149 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9151 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9152 {
9153 /* Register, memory. */
9154 IEM_MC_BEGIN(0, 2);
9155 IEM_MC_LOCAL(RTUINT128U, uSrc);
9156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9157
9158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9160 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9161 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9162
9163 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9164 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9165
9166 IEM_MC_ADVANCE_RIP();
9167 IEM_MC_END();
9168 return VINF_SUCCESS;
9169 }
9170
9171 /**
9172 * @opdone
9173 * @opmnemonic ud660fe7reg
9174 * @opcode 0xe7
9175 * @opcodesub 11 mr/reg
9176 * @oppfx 0x66
9177 * @opunused immediate
9178 * @opcpuid sse
9179 * @optest ->
9180 */
9181 return IEMOP_RAISE_INVALID_OPCODE();
9182}
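
/*
 * Note (illustration): unlike MOVNTQ above, MOVNTDQ requires a 16-byte
 * aligned destination; IEM_MC_STORE_MEM_U128_ALIGN_SSE is expected to raise
 * #GP(0) on a misaligned effective address, roughly:
 *
 *     if (GCPtrEffSrc & 15)
 *         return iemRaiseGeneralProtectionFault0(pVCpu);
 */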
9183
9184/* Opcode 0xf3 0x0f 0xe7 - invalid */
9185/* Opcode 0xf2 0x0f 0xe7 - invalid */
9186
9187
9188/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9189FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9190/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9191FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9192/* Opcode 0xf3 0x0f 0xe8 - invalid */
9193/* Opcode 0xf2 0x0f 0xe8 - invalid */
9194
9195/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9196FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9197/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9198FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9199/* Opcode 0xf3 0x0f 0xe9 - invalid */
9200/* Opcode 0xf2 0x0f 0xe9 - invalid */
9201
9202/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9203FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9204/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9205FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9206/* Opcode 0xf3 0x0f 0xea - invalid */
9207/* Opcode 0xf2 0x0f 0xea - invalid */
9208
9209/** Opcode 0x0f 0xeb - por Pq, Qq */
9210FNIEMOP_STUB(iemOp_por_Pq_Qq);
9211/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9212FNIEMOP_STUB(iemOp_por_Vx_W);
9213/* Opcode 0xf3 0x0f 0xeb - invalid */
9214/* Opcode 0xf2 0x0f 0xeb - invalid */
9215
9216/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9217FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9218/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9219FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9220/* Opcode 0xf3 0x0f 0xec - invalid */
9221/* Opcode 0xf2 0x0f 0xec - invalid */
9222
9223/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9224FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9225/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9226FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9227/* Opcode 0xf3 0x0f 0xed - invalid */
9228/* Opcode 0xf2 0x0f 0xed - invalid */
9229
9230/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9231FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9232/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9233FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9234/* Opcode 0xf3 0x0f 0xee - invalid */
9235/* Opcode 0xf2 0x0f 0xee - invalid */
9236
9237
9238/** Opcode 0x0f 0xef - pxor Pq, Qq */
9239FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9240{
9241 IEMOP_MNEMONIC(pxor, "pxor");
9242 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9243}
9244
9245/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9246FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9247{
9248 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9249 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9250}
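
/*
 * Illustrative sketch only: the worker behind g_iemAImpl_pxor XORs the full
 * source operand into the destination; for the 128-bit form this amounts to:
 */
#if 0 /* illustration */
static void iemIllustratePxorU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] ^= puSrc->au64[0];
    puDst->au64[1] ^= puSrc->au64[1];
}
#endif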
9251
9252/* Opcode 0xf3 0x0f 0xef - invalid */
9253/* Opcode 0xf2 0x0f 0xef - invalid */
9254
9255/* Opcode 0x0f 0xf0 - invalid */
9256/* Opcode 0x66 0x0f 0xf0 - invalid */
9257/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9258FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9259
9260/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9261FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9262/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9263FNIEMOP_STUB(iemOp_psllw_Vx_W);
9264/* Opcode 0xf2 0x0f 0xf1 - invalid */
9265
9266/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9267FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9268/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9269FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9270/* Opcode 0xf2 0x0f 0xf2 - invalid */
9271
9272/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9273FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9274/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9275FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9276/* Opcode 0xf2 0x0f 0xf3 - invalid */
9277
9278/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9279FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9280/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9281FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9282/* Opcode 0xf2 0x0f 0xf4 - invalid */
9283
9284/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9285FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9286/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9287FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9288/* Opcode 0xf2 0x0f 0xf5 - invalid */
9289
9290/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9291FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9292/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9293FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9294/* Opcode 0xf2 0x0f 0xf6 - invalid */
9295
9296/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9297FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9298/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9299FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9300/* Opcode 0xf2 0x0f 0xf7 - invalid */
9301
9302/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9303FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9304/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9305FNIEMOP_STUB(iemOp_psubb_Vx_W);
9306/* Opcode 0xf2 0x0f 0xf8 - invalid */
9307
9308/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9309FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9310/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9311FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9312/* Opcode 0xf2 0x0f 0xf9 - invalid */
9313
9314/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9315FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9316/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9317FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9318/* Opcode 0xf2 0x0f 0xfa - invalid */
9319
9320/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9321FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9322/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9323FNIEMOP_STUB(iemOp_psubq_Vx_W);
9324/* Opcode 0xf2 0x0f 0xfb - invalid */
9325
9326/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9327FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9328/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9329FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9330/* Opcode 0xf2 0x0f 0xfc - invalid */
9331
9332/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9333FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9334/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9335FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9336/* Opcode 0xf2 0x0f 0xfd - invalid */
9337
9338/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9339FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9340/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9341FNIEMOP_STUB(iemOp_paddd_Vx_W);
9342/* Opcode 0xf2 0x0f 0xfe - invalid */
9343
9344
9345/** Opcode 0x0f 0xff - UD0 */
9346FNIEMOP_DEF(iemOp_ud0)
9347{
9348 IEMOP_MNEMONIC(ud0, "ud0");
9349 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9350 {
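        /* Intel CPUs consume a ModR/M byte (plus any SIB/displacement) for
           UD0, so decode it here to keep the reported instruction length
           right; AMD CPUs raise #UD on the opcode alone. */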
9351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9352#ifndef TST_IEM_CHECK_MC
9353 RTGCPTR GCPtrEff;
9354 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9355 if (rcStrict != VINF_SUCCESS)
9356 return rcStrict;
9357#endif
9358 IEMOP_HLP_DONE_DECODING();
9359 }
9360 return IEMOP_RAISE_INVALID_OPCODE();
9361}
9362
9363
9364
9365/**
9366 * Two byte opcode map, first byte 0x0f.
9367 *
9368 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9369 * check if it needs updating as well when making changes.
9370 */
9371IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9372{
9373 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9374 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9375 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9376 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9377 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9378 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9379 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9380 /* 0x06 */ IEMOP_X4(iemOp_clts),
9381 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9382 /* 0x08 */ IEMOP_X4(iemOp_invd),
9383 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9384 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9385 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9386 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9387 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9388 /* 0x0e */ IEMOP_X4(iemOp_femms),
9389 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9390
9391 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9392 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9393 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9394 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9395 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9396 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9397 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9398 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9399 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9400 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9401 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9402 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9403 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9404 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9405 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9406 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9407
9408 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9409 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9410 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9411 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9412 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9413 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9414 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9415 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9416 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9417 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9418 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9419 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9420 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9421 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9422 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9423 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9424
9425 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9426 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9427 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9428 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9429 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9430 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9431 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9432 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9433 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9434 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9435 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9436 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9437 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9438 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9439 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9440 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9441
9442 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9443 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9444 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9445 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9446 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9447 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9448 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9449 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9450 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9451 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9452 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9453 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9454 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9455 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9456 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9457 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9458
9459 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9460 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9461 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9462 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9463 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9464 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9465 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9466 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9467 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9468 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9469 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9470 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9471 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9472 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9473 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9474 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9475
9476 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9477 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9478 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9479 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9480 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9481 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9482 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9483 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9484 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9485 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9486 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9487 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9488 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9489 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9490 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9491 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9492
9493 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9494 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9495 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9496 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9497 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9498 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9499 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9500 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9501
9502 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9503 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9505 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9506 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9507 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9508 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9509 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9510
9511 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9512 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9513 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9514 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9515 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9516 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9517 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9518 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9519 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9520 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9521 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9522 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9523 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9524 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9525 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9526 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9527
9528 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9529 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9530 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9531 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9532 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9533 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9534 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9535 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9536 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9537 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9538 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9539 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9540 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9541 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9542 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9543 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9544
9545 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9546 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9547 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9548 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9549 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9550 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9551 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9552 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9553 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9554 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9555 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9556 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9557 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9558 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9559 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9560 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9561
9562 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9563 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9564 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9565 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9566 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9567 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9568 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9569 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9570 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9571 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9572 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9573 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9574 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9575 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9576 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9577 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9578
9579 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9580 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9581 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9582 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9583 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9584 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9585 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9586 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9587 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9588 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9589 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9590 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9591 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9592 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9593 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9594 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9595
9596 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9597 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9598 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9599 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9600 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9601 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9602 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9603 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9604 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9605 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9606 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9607 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9608 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9609 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9610 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9611 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9612
9613 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9614 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9615 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9616 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9617 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9618 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9619 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9620 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9621 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9622 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9623 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9624 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9625 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9626 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9627 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9628 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9629
9630 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9631 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9632 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9633 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9634 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9635 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9636 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9637 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9638 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9639 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9640 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9641 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9642 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9643 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9644 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9645 /* 0xff */ IEMOP_X4(iemOp_ud0),
9646};
9647AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
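
/*
 * Dispatch sketch (illustrative): the map is indexed by opcode times four
 * plus a prefix column (0 = none, 1 = 66h, 2 = f3h, 3 = f2h), which is why
 * 256 * 4 = 1024 entries are asserted above.  Assuming an idxPrefix field
 * tracking the last operand-size/repeat prefix, the dispatch is roughly:
 *
 *     return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */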
9648
9649/** @} */
9650