VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 72506

Last change on this file since 72506 was 72505, checked in by vboxsync, 7 years ago

IEM: Made smsw use CImpl so to better facilitate SVM intercepts.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 335.2 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72505 2018-06-11 12:05:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Opcode 0x0f 0x00 /3. */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 /0. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_MNEMONIC(vmcall, "vmcall");
306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
307
308 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
309 want all hypercalls regardless of instruction used, and if a
310 hypercall isn't handled by GIM or HMSvm will raise an #UD.
311 (NEM/win makes ASSUMPTIONS about this behavior.) */
312 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
313}
314
315
316/** Opcode 0x0f 0x01 /0. */
317FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
318{
319 IEMOP_BITCH_ABOUT_STUB();
320 return IEMOP_RAISE_INVALID_OPCODE();
321}
322
323
324/** Opcode 0x0f 0x01 /0. */
325FNIEMOP_DEF(iemOp_Grp7_vmresume)
326{
327 IEMOP_BITCH_ABOUT_STUB();
328 return IEMOP_RAISE_INVALID_OPCODE();
329}
330
331
332/** Opcode 0x0f 0x01 /0. */
333FNIEMOP_DEF(iemOp_Grp7_vmxoff)
334{
335 IEMOP_BITCH_ABOUT_STUB();
336 return IEMOP_RAISE_INVALID_OPCODE();
337}
338
339
340/** Opcode 0x0f 0x01 /1. */
341FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
342{
343 IEMOP_MNEMONIC(sidt, "sidt Ms");
344 IEMOP_HLP_MIN_286();
345 IEMOP_HLP_64BIT_OP_SIZE();
346 IEM_MC_BEGIN(2, 1);
347 IEM_MC_ARG(uint8_t, iEffSeg, 0);
348 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
351 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
352 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
353 IEM_MC_END();
354 return VINF_SUCCESS;
355}
356
357
358/** Opcode 0x0f 0x01 /1. */
359FNIEMOP_DEF(iemOp_Grp7_monitor)
360{
361 IEMOP_MNEMONIC(monitor, "monitor");
362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
363 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
364}
365
366
367/** Opcode 0x0f 0x01 /1. */
368FNIEMOP_DEF(iemOp_Grp7_mwait)
369{
370 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
372 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
373}
374
375
376/** Opcode 0x0f 0x01 /2. */
377FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
378{
379 IEMOP_MNEMONIC(lgdt, "lgdt");
380 IEMOP_HLP_64BIT_OP_SIZE();
381 IEM_MC_BEGIN(3, 1);
382 IEM_MC_ARG(uint8_t, iEffSeg, 0);
383 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
384 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
387 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
388 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
389 IEM_MC_END();
390 return VINF_SUCCESS;
391}
392
393
394/** Opcode 0x0f 0x01 0xd0. */
395FNIEMOP_DEF(iemOp_Grp7_xgetbv)
396{
397 IEMOP_MNEMONIC(xgetbv, "xgetbv");
398 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
399 {
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
401 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
402 }
403 return IEMOP_RAISE_INVALID_OPCODE();
404}
405
406
407/** Opcode 0x0f 0x01 0xd1. */
408FNIEMOP_DEF(iemOp_Grp7_xsetbv)
409{
410 IEMOP_MNEMONIC(xsetbv, "xsetbv");
411 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
412 {
413 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
414 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
415 }
416 return IEMOP_RAISE_INVALID_OPCODE();
417}
418
419
420/** Opcode 0x0f 0x01 /3. */
421FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
422{
423 IEMOP_MNEMONIC(lidt, "lidt");
424 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
425 ? IEMMODE_64BIT
426 : pVCpu->iem.s.enmEffOpSize;
427 IEM_MC_BEGIN(3, 1);
428 IEM_MC_ARG(uint8_t, iEffSeg, 0);
429 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
430 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
433 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
434 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
435 IEM_MC_END();
436 return VINF_SUCCESS;
437}
438
439
440/** Opcode 0x0f 0x01 0xd8. */
441#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
442FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
443{
444 IEMOP_MNEMONIC(vmrun, "vmrun");
445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
447}
448#else
449FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
450#endif
451
452/** Opcode 0x0f 0x01 0xd9. */
453FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
454{
455 IEMOP_MNEMONIC(vmmcall, "vmmcall");
456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
457
458 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
459 want all hypercalls regardless of instruction used, and if a
460 hypercall isn't handled by GIM or HMSvm will raise an #UD.
461 (NEM/win makes ASSUMPTIONS about this behavior.) */
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
463}
464
465/** Opcode 0x0f 0x01 0xda. */
466#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
467FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
468{
469 IEMOP_MNEMONIC(vmload, "vmload");
470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
471 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
472}
473#else
474FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
475#endif
476
477
478/** Opcode 0x0f 0x01 0xdb. */
479#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
480FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
481{
482 IEMOP_MNEMONIC(vmsave, "vmsave");
483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
484 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
485}
486#else
487FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
488#endif
489
490
491/** Opcode 0x0f 0x01 0xdc. */
492#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
493FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
494{
495 IEMOP_MNEMONIC(stgi, "stgi");
496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
497 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
498}
499#else
500FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
501#endif
502
503
504/** Opcode 0x0f 0x01 0xdd. */
505#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
506FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
507{
508 IEMOP_MNEMONIC(clgi, "clgi");
509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
510 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
511}
512#else
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514#endif
515
516
517/** Opcode 0x0f 0x01 0xdf. */
518#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
519FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
520{
521 IEMOP_MNEMONIC(invlpga, "invlpga");
522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
523 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
524}
525#else
526FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
527#endif
528
529
530/** Opcode 0x0f 0x01 0xde. */
531#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
532FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
533{
534 IEMOP_MNEMONIC(skinit, "skinit");
535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
536 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
537}
538#else
539FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
540#endif
541
542
543/** Opcode 0x0f 0x01 /4. */
544FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
545{
546 IEMOP_MNEMONIC(smsw, "smsw");
547 IEMOP_HLP_MIN_286();
548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
549 {
550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
551 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
552 }
553
554 /* Ignore operand size here, memory refs are always 16-bit. */
555 IEM_MC_BEGIN(2, 0);
556 IEM_MC_ARG(uint16_t, iEffSeg, 0);
557 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
560 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
561 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
562 IEM_MC_END();
563 return VINF_SUCCESS;
564}
565
566
567/** Opcode 0x0f 0x01 /6. */
568FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
569{
570 /* The operand size is effectively ignored, all is 16-bit and only the
571 lower 3-bits are used. */
572 IEMOP_MNEMONIC(lmsw, "lmsw");
573 IEMOP_HLP_MIN_286();
574 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
575 {
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEM_MC_BEGIN(1, 0);
578 IEM_MC_ARG(uint16_t, u16Tmp, 0);
579 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
580 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
581 IEM_MC_END();
582 }
583 else
584 {
585 IEM_MC_BEGIN(1, 1);
586 IEM_MC_ARG(uint16_t, u16Tmp, 0);
587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
590 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
591 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
592 IEM_MC_END();
593 }
594 return VINF_SUCCESS;
595}
596
597
598/** Opcode 0x0f 0x01 /7. */
599FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
600{
601 IEMOP_MNEMONIC(invlpg, "invlpg");
602 IEMOP_HLP_MIN_486();
603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
604 IEM_MC_BEGIN(1, 1);
605 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
607 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
608 IEM_MC_END();
609 return VINF_SUCCESS;
610}
611
612
613/** Opcode 0x0f 0x01 /7. */
614FNIEMOP_DEF(iemOp_Grp7_swapgs)
615{
616 IEMOP_MNEMONIC(swapgs, "swapgs");
617 IEMOP_HLP_ONLY_64BIT();
618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
619 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
620}
621
622
623/** Opcode 0x0f 0x01 /7. */
624FNIEMOP_DEF(iemOp_Grp7_rdtscp)
625{
626 IEMOP_MNEMONIC(rdtscp, "rdtscp");
627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
628 /** @todo SVM intercept removal from here. */
629 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
630 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
631}
632
633
634/**
635 * Group 7 jump table, memory variant.
636 */
637IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
638{
639 iemOp_Grp7_sgdt,
640 iemOp_Grp7_sidt,
641 iemOp_Grp7_lgdt,
642 iemOp_Grp7_lidt,
643 iemOp_Grp7_smsw,
644 iemOp_InvalidWithRM,
645 iemOp_Grp7_lmsw,
646 iemOp_Grp7_invlpg
647};
648
649
650/** Opcode 0x0f 0x01. */
651FNIEMOP_DEF(iemOp_Grp7)
652{
653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
654 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
655 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
656
657 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
658 {
659 case 0:
660 switch (bRm & X86_MODRM_RM_MASK)
661 {
662 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
663 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
664 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
665 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
666 }
667 return IEMOP_RAISE_INVALID_OPCODE();
668
669 case 1:
670 switch (bRm & X86_MODRM_RM_MASK)
671 {
672 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
673 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
674 }
675 return IEMOP_RAISE_INVALID_OPCODE();
676
677 case 2:
678 switch (bRm & X86_MODRM_RM_MASK)
679 {
680 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
681 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
682 }
683 return IEMOP_RAISE_INVALID_OPCODE();
684
685 case 3:
686 switch (bRm & X86_MODRM_RM_MASK)
687 {
688 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
689 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
690 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
691 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
692 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
693 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
694 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
695 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
697 }
698
699 case 4:
700 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
701
702 case 5:
703 return IEMOP_RAISE_INVALID_OPCODE();
704
705 case 6:
706 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
707
708 case 7:
709 switch (bRm & X86_MODRM_RM_MASK)
710 {
711 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
712 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
713 }
714 return IEMOP_RAISE_INVALID_OPCODE();
715
716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
717 }
718}
719
720/** Opcode 0x0f 0x00 /3. */
721FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
722{
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
725
726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
727 {
728 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
729 switch (pVCpu->iem.s.enmEffOpSize)
730 {
731 case IEMMODE_16BIT:
732 {
733 IEM_MC_BEGIN(3, 0);
734 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
735 IEM_MC_ARG(uint16_t, u16Sel, 1);
736 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
737
738 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
739 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
740 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
741
742 IEM_MC_END();
743 return VINF_SUCCESS;
744 }
745
746 case IEMMODE_32BIT:
747 case IEMMODE_64BIT:
748 {
749 IEM_MC_BEGIN(3, 0);
750 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
751 IEM_MC_ARG(uint16_t, u16Sel, 1);
752 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
753
754 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
755 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
756 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
757
758 IEM_MC_END();
759 return VINF_SUCCESS;
760 }
761
762 IEM_NOT_REACHED_DEFAULT_CASE_RET();
763 }
764 }
765 else
766 {
767 switch (pVCpu->iem.s.enmEffOpSize)
768 {
769 case IEMMODE_16BIT:
770 {
771 IEM_MC_BEGIN(3, 1);
772 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
773 IEM_MC_ARG(uint16_t, u16Sel, 1);
774 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
776
777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
778 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
779
780 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
781 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
782 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
783
784 IEM_MC_END();
785 return VINF_SUCCESS;
786 }
787
788 case IEMMODE_32BIT:
789 case IEMMODE_64BIT:
790 {
791 IEM_MC_BEGIN(3, 1);
792 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
793 IEM_MC_ARG(uint16_t, u16Sel, 1);
794 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
796
797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
798 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
799/** @todo testcase: make sure it's a 16-bit read. */
800
801 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
802 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
804
805 IEM_MC_END();
806 return VINF_SUCCESS;
807 }
808
809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
810 }
811 }
812}
813
814
815
816/** Opcode 0x0f 0x02. */
817FNIEMOP_DEF(iemOp_lar_Gv_Ew)
818{
819 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
820 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
821}
822
823
824/** Opcode 0x0f 0x03. */
825FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
826{
827 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
828 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
829}
830
831
832/** Opcode 0x0f 0x05. */
833FNIEMOP_DEF(iemOp_syscall)
834{
835 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
838}
839
840
841/** Opcode 0x0f 0x06. */
842FNIEMOP_DEF(iemOp_clts)
843{
844 IEMOP_MNEMONIC(clts, "clts");
845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
846 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
847}
848
849
850/** Opcode 0x0f 0x07. */
851FNIEMOP_DEF(iemOp_sysret)
852{
853 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
855 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
856}
857
858
859/** Opcode 0x0f 0x08. */
860FNIEMOP_DEF(iemOp_invd)
861{
862 IEMOP_MNEMONIC(invd, "invd");
863#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
864 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
865 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
866#else
867 RT_NOREF_PV(pVCpu);
868#endif
869 /** @todo implement invd for the regular case (above only handles nested SVM
870 * exits). */
871 IEMOP_BITCH_ABOUT_STUB();
872 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
873}
874
875// IEMOP_HLP_MIN_486();
876
877
878/** Opcode 0x0f 0x09. */
879FNIEMOP_DEF(iemOp_wbinvd)
880{
881 IEMOP_MNEMONIC(wbinvd, "wbinvd");
882 IEMOP_HLP_MIN_486();
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 IEM_MC_BEGIN(0, 0);
885 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
886 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
887 IEM_MC_ADVANCE_RIP();
888 IEM_MC_END();
889 return VINF_SUCCESS; /* ignore for now */
890}
891
892
893/** Opcode 0x0f 0x0b. */
894FNIEMOP_DEF(iemOp_ud2)
895{
896 IEMOP_MNEMONIC(ud2, "ud2");
897 return IEMOP_RAISE_INVALID_OPCODE();
898}
899
900/** Opcode 0x0f 0x0d. */
901FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
902{
903 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
904 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
905 {
906 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
907 return IEMOP_RAISE_INVALID_OPCODE();
908 }
909
910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
911 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
912 {
913 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
914 return IEMOP_RAISE_INVALID_OPCODE();
915 }
916
917 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
918 {
919 case 2: /* Aliased to /0 for the time being. */
920 case 4: /* Aliased to /0 for the time being. */
921 case 5: /* Aliased to /0 for the time being. */
922 case 6: /* Aliased to /0 for the time being. */
923 case 7: /* Aliased to /0 for the time being. */
924 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
925 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
926 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
928 }
929
930 IEM_MC_BEGIN(0, 1);
931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
934 /* Currently a NOP. */
935 NOREF(GCPtrEffSrc);
936 IEM_MC_ADVANCE_RIP();
937 IEM_MC_END();
938 return VINF_SUCCESS;
939}
940
941
942/** Opcode 0x0f 0x0e. */
943FNIEMOP_DEF(iemOp_femms)
944{
945 IEMOP_MNEMONIC(femms, "femms");
946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
947
948 IEM_MC_BEGIN(0,0);
949 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
950 IEM_MC_MAYBE_RAISE_FPU_XCPT();
951 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
952 IEM_MC_FPU_FROM_MMX_MODE();
953 IEM_MC_ADVANCE_RIP();
954 IEM_MC_END();
955 return VINF_SUCCESS;
956}
957
958
959/** Opcode 0x0f 0x0f. */
960FNIEMOP_DEF(iemOp_3Dnow)
961{
962 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
963 {
964 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
965 return IEMOP_RAISE_INVALID_OPCODE();
966 }
967
968#ifdef IEM_WITH_3DNOW
969 /* This is pretty sparse, use switch instead of table. */
970 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
971 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
972#else
973 IEMOP_BITCH_ABOUT_STUB();
974 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
975#endif
976}
977
978
979/**
980 * @opcode 0x10
981 * @oppfx none
982 * @opcpuid sse
983 * @opgroup og_sse_simdfp_datamove
984 * @opxcpttype 4UA
985 * @optest op1=1 op2=2 -> op1=2
986 * @optest op1=0 op2=-22 -> op1=-22
987 */
988FNIEMOP_DEF(iemOp_movups_Vps_Wps)
989{
990 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
993 {
994 /*
995 * Register, register.
996 */
997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
998 IEM_MC_BEGIN(0, 0);
999 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1000 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1001 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1002 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1003 IEM_MC_ADVANCE_RIP();
1004 IEM_MC_END();
1005 }
1006 else
1007 {
1008 /*
1009 * Memory, register.
1010 */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1014
1015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1017 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1019
1020 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1021 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1022
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 }
1026 return VINF_SUCCESS;
1027
1028}
1029
1030
1031/**
1032 * @opcode 0x10
1033 * @oppfx 0x66
1034 * @opcpuid sse2
1035 * @opgroup og_sse2_pcksclr_datamove
1036 * @opxcpttype 4UA
1037 * @optest op1=1 op2=2 -> op1=2
1038 * @optest op1=0 op2=-42 -> op1=-42
1039 */
1040FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1041{
1042 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1045 {
1046 /*
1047 * Register, register.
1048 */
1049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1050 IEM_MC_BEGIN(0, 0);
1051 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1052 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1053 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1054 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1055 IEM_MC_ADVANCE_RIP();
1056 IEM_MC_END();
1057 }
1058 else
1059 {
1060 /*
1061 * Memory, register.
1062 */
1063 IEM_MC_BEGIN(0, 2);
1064 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1066
1067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1070 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1071
1072 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1073 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1074
1075 IEM_MC_ADVANCE_RIP();
1076 IEM_MC_END();
1077 }
1078 return VINF_SUCCESS;
1079}
1080
1081
1082/**
1083 * @opcode 0x10
1084 * @oppfx 0xf3
1085 * @opcpuid sse
1086 * @opgroup og_sse_simdfp_datamove
1087 * @opxcpttype 5
1088 * @optest op1=1 op2=2 -> op1=2
1089 * @optest op1=0 op2=-22 -> op1=-22
1090 */
1091FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1092{
1093 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1095 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1096 {
1097 /*
1098 * Register, register.
1099 */
1100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1101 IEM_MC_BEGIN(0, 1);
1102 IEM_MC_LOCAL(uint32_t, uSrc);
1103
1104 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1106 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1107 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1108
1109 IEM_MC_ADVANCE_RIP();
1110 IEM_MC_END();
1111 }
1112 else
1113 {
1114 /*
1115 * Memory, register.
1116 */
1117 IEM_MC_BEGIN(0, 2);
1118 IEM_MC_LOCAL(uint32_t, uSrc);
1119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1120
1121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1124 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1125
1126 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1127 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1128
1129 IEM_MC_ADVANCE_RIP();
1130 IEM_MC_END();
1131 }
1132 return VINF_SUCCESS;
1133}
1134
1135
1136/**
1137 * @opcode 0x10
1138 * @oppfx 0xf2
1139 * @opcpuid sse2
1140 * @opgroup og_sse2_pcksclr_datamove
1141 * @opxcpttype 5
1142 * @optest op1=1 op2=2 -> op1=2
1143 * @optest op1=0 op2=-42 -> op1=-42
1144 */
1145FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1146{
1147 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1150 {
1151 /*
1152 * Register, register.
1153 */
1154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1155 IEM_MC_BEGIN(0, 1);
1156 IEM_MC_LOCAL(uint64_t, uSrc);
1157
1158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1160 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1161 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1162
1163 IEM_MC_ADVANCE_RIP();
1164 IEM_MC_END();
1165 }
1166 else
1167 {
1168 /*
1169 * Memory, register.
1170 */
1171 IEM_MC_BEGIN(0, 2);
1172 IEM_MC_LOCAL(uint64_t, uSrc);
1173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1174
1175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1177 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1178 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1179
1180 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1181 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1182
1183 IEM_MC_ADVANCE_RIP();
1184 IEM_MC_END();
1185 }
1186 return VINF_SUCCESS;
1187}
1188
1189
1190/**
1191 * @opcode 0x11
1192 * @oppfx none
1193 * @opcpuid sse
1194 * @opgroup og_sse_simdfp_datamove
1195 * @opxcpttype 4UA
1196 * @optest op1=1 op2=2 -> op1=2
1197 * @optest op1=0 op2=-42 -> op1=-42
1198 */
1199FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1200{
1201 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1204 {
1205 /*
1206 * Register, register.
1207 */
1208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1209 IEM_MC_BEGIN(0, 0);
1210 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1211 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1212 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1213 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1214 IEM_MC_ADVANCE_RIP();
1215 IEM_MC_END();
1216 }
1217 else
1218 {
1219 /*
1220 * Memory, register.
1221 */
1222 IEM_MC_BEGIN(0, 2);
1223 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1225
1226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1230
1231 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1232 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1233
1234 IEM_MC_ADVANCE_RIP();
1235 IEM_MC_END();
1236 }
1237 return VINF_SUCCESS;
1238}
1239
1240
1241/**
1242 * @opcode 0x11
1243 * @oppfx 0x66
1244 * @opcpuid sse2
1245 * @opgroup og_sse2_pcksclr_datamove
1246 * @opxcpttype 4UA
1247 * @optest op1=1 op2=2 -> op1=2
1248 * @optest op1=0 op2=-42 -> op1=-42
1249 */
1250FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1251{
1252 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1255 {
1256 /*
1257 * Register, register.
1258 */
1259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1260 IEM_MC_BEGIN(0, 0);
1261 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1262 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1263 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1264 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1265 IEM_MC_ADVANCE_RIP();
1266 IEM_MC_END();
1267 }
1268 else
1269 {
1270 /*
1271 * Memory, register.
1272 */
1273 IEM_MC_BEGIN(0, 2);
1274 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1276
1277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1279 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1280 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1281
1282 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1283 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1284
1285 IEM_MC_ADVANCE_RIP();
1286 IEM_MC_END();
1287 }
1288 return VINF_SUCCESS;
1289}
1290
1291
1292/**
1293 * @opcode 0x11
1294 * @oppfx 0xf3
1295 * @opcpuid sse
1296 * @opgroup og_sse_simdfp_datamove
1297 * @opxcpttype 5
1298 * @optest op1=1 op2=2 -> op1=2
1299 * @optest op1=0 op2=-22 -> op1=-22
1300 */
1301FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1302{
1303 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1306 {
1307 /*
1308 * Register, register.
1309 */
1310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1311 IEM_MC_BEGIN(0, 1);
1312 IEM_MC_LOCAL(uint32_t, uSrc);
1313
1314 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1315 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1316 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1317 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1318
1319 IEM_MC_ADVANCE_RIP();
1320 IEM_MC_END();
1321 }
1322 else
1323 {
1324 /*
1325 * Memory, register.
1326 */
1327 IEM_MC_BEGIN(0, 2);
1328 IEM_MC_LOCAL(uint32_t, uSrc);
1329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1330
1331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1333 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1335
1336 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1337 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1338
1339 IEM_MC_ADVANCE_RIP();
1340 IEM_MC_END();
1341 }
1342 return VINF_SUCCESS;
1343}
1344
1345
1346/**
1347 * @opcode 0x11
1348 * @oppfx 0xf2
1349 * @opcpuid sse2
1350 * @opgroup og_sse2_pcksclr_datamove
1351 * @opxcpttype 5
1352 * @optest op1=1 op2=2 -> op1=2
1353 * @optest op1=0 op2=-42 -> op1=-42
1354 */
1355FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1356{
1357 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1359 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1360 {
1361 /*
1362 * Register, register.
1363 */
1364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1365 IEM_MC_BEGIN(0, 1);
1366 IEM_MC_LOCAL(uint64_t, uSrc);
1367
1368 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1369 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1370 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1371 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1372
1373 IEM_MC_ADVANCE_RIP();
1374 IEM_MC_END();
1375 }
1376 else
1377 {
1378 /*
1379 * Memory, register.
1380 */
1381 IEM_MC_BEGIN(0, 2);
1382 IEM_MC_LOCAL(uint64_t, uSrc);
1383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1384
1385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1387 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1388 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1389
1390 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1391 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1392
1393 IEM_MC_ADVANCE_RIP();
1394 IEM_MC_END();
1395 }
1396 return VINF_SUCCESS;
1397}
1398
1399
1400FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1401{
1402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1403 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1404 {
1405 /**
1406 * @opcode 0x12
1407 * @opcodesub 11 mr/reg
1408 * @oppfx none
1409 * @opcpuid sse
1410 * @opgroup og_sse_simdfp_datamove
1411 * @opxcpttype 5
1412 * @optest op1=1 op2=2 -> op1=2
1413 * @optest op1=0 op2=-42 -> op1=-42
1414 */
1415 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1416
1417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1418 IEM_MC_BEGIN(0, 1);
1419 IEM_MC_LOCAL(uint64_t, uSrc);
1420
1421 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1422 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1423 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1424 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1425
1426 IEM_MC_ADVANCE_RIP();
1427 IEM_MC_END();
1428 }
1429 else
1430 {
1431 /**
1432 * @opdone
1433 * @opcode 0x12
1434 * @opcodesub !11 mr/reg
1435 * @oppfx none
1436 * @opcpuid sse
1437 * @opgroup og_sse_simdfp_datamove
1438 * @opxcpttype 5
1439 * @optest op1=1 op2=2 -> op1=2
1440 * @optest op1=0 op2=-42 -> op1=-42
1441 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
1442 */
1443 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1444
1445 IEM_MC_BEGIN(0, 2);
1446 IEM_MC_LOCAL(uint64_t, uSrc);
1447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1448
1449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1453
1454 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1455 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1456
1457 IEM_MC_ADVANCE_RIP();
1458 IEM_MC_END();
1459 }
1460 return VINF_SUCCESS;
1461}
1462
1463
1464/**
1465 * @opcode 0x12
1466 * @opcodesub !11 mr/reg
1467 * @oppfx 0x66
1468 * @opcpuid sse2
1469 * @opgroup og_sse2_pcksclr_datamove
1470 * @opxcpttype 5
1471 * @optest op1=1 op2=2 -> op1=2
1472 * @optest op1=0 op2=-42 -> op1=-42
1473 */
1474FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1475{
1476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1477 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1478 {
1479 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1480
1481 IEM_MC_BEGIN(0, 2);
1482 IEM_MC_LOCAL(uint64_t, uSrc);
1483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1484
1485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1489
1490 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1491 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1492
1493 IEM_MC_ADVANCE_RIP();
1494 IEM_MC_END();
1495 return VINF_SUCCESS;
1496 }
1497
1498 /**
1499 * @opdone
1500 * @opmnemonic ud660f12m3
1501 * @opcode 0x12
1502 * @opcodesub 11 mr/reg
1503 * @oppfx 0x66
1504 * @opunused immediate
1505 * @opcpuid sse
1506 * @optest ->
1507 */
1508 return IEMOP_RAISE_INVALID_OPCODE();
1509}
1510
1511
1512/**
1513 * @opcode 0x12
1514 * @oppfx 0xf3
1515 * @opcpuid sse3
1516 * @opgroup og_sse3_pcksclr_datamove
1517 * @opxcpttype 4
1518 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1519 * op1=0x00000002000000020000000100000001
1520 */
1521FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1522{
1523 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1524 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1526 {
1527 /*
1528 * Register, register.
1529 */
1530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1531 IEM_MC_BEGIN(2, 0);
1532 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1533 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1534
1535 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1536 IEM_MC_PREPARE_SSE_USAGE();
1537
1538 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1539 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1540 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1541
1542 IEM_MC_ADVANCE_RIP();
1543 IEM_MC_END();
1544 }
1545 else
1546 {
1547 /*
1548 * Register, memory.
1549 */
1550 IEM_MC_BEGIN(2, 2);
1551 IEM_MC_LOCAL(RTUINT128U, uSrc);
1552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1553 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1554 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1555
1556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1558 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1559 IEM_MC_PREPARE_SSE_USAGE();
1560
1561 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1562 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1563 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1564
1565 IEM_MC_ADVANCE_RIP();
1566 IEM_MC_END();
1567 }
1568 return VINF_SUCCESS;
1569}
1570
1571
1572/**
1573 * @opcode 0x12
1574 * @oppfx 0xf2
1575 * @opcpuid sse3
1576 * @opgroup og_sse3_pcksclr_datamove
1577 * @opxcpttype 5
1578 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1579 * op1=0x22222222111111112222222211111111
1580 */
1581FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1582{
1583 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1586 {
1587 /*
1588 * Register, register.
1589 */
1590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1591 IEM_MC_BEGIN(2, 0);
1592 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1593 IEM_MC_ARG(uint64_t, uSrc, 1);
1594
1595 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1596 IEM_MC_PREPARE_SSE_USAGE();
1597
1598 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1599 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1600 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1601
1602 IEM_MC_ADVANCE_RIP();
1603 IEM_MC_END();
1604 }
1605 else
1606 {
1607 /*
1608 * Register, memory.
1609 */
1610 IEM_MC_BEGIN(2, 2);
1611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1612 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1613 IEM_MC_ARG(uint64_t, uSrc, 1);
1614
1615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1617 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1618 IEM_MC_PREPARE_SSE_USAGE();
1619
1620 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1621 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1622 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1623
1624 IEM_MC_ADVANCE_RIP();
1625 IEM_MC_END();
1626 }
1627 return VINF_SUCCESS;
1628}
1629
1630
1631/**
1632 * @opcode 0x13
1633 * @opcodesub !11 mr/reg
1634 * @oppfx none
1635 * @opcpuid sse
1636 * @opgroup og_sse_simdfp_datamove
1637 * @opxcpttype 5
1638 * @optest op1=1 op2=2 -> op1=2
1639 * @optest op1=0 op2=-42 -> op1=-42
1640 */
1641FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1642{
1643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1644 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1645 {
1646 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1647
1648 IEM_MC_BEGIN(0, 2);
1649 IEM_MC_LOCAL(uint64_t, uSrc);
1650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1651
1652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1656
1657 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1658 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1659
1660 IEM_MC_ADVANCE_RIP();
1661 IEM_MC_END();
1662 return VINF_SUCCESS;
1663 }
1664
1665 /**
1666 * @opdone
1667 * @opmnemonic ud0f13m3
1668 * @opcode 0x13
1669 * @opcodesub 11 mr/reg
1670 * @oppfx none
1671 * @opunused immediate
1672 * @opcpuid sse
1673 * @optest ->
1674 */
1675 return IEMOP_RAISE_INVALID_OPCODE();
1676}
1677
1678
1679/**
1680 * @opcode 0x13
1681 * @opcodesub !11 mr/reg
1682 * @oppfx 0x66
1683 * @opcpuid sse2
1684 * @opgroup og_sse2_pcksclr_datamove
1685 * @opxcpttype 5
1686 * @optest op1=1 op2=2 -> op1=2
1687 * @optest op1=0 op2=-42 -> op1=-42
1688 */
1689FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1690{
1691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1692 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1693 {
1694 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1695 IEM_MC_BEGIN(0, 2);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1698
1699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1701 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1702 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1703
1704 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1706
1707 IEM_MC_ADVANCE_RIP();
1708 IEM_MC_END();
1709 return VINF_SUCCESS;
1710 }
1711
1712 /**
1713 * @opdone
1714 * @opmnemonic ud660f13m3
1715 * @opcode 0x13
1716 * @opcodesub 11 mr/reg
1717 * @oppfx 0x66
1718 * @opunused immediate
1719 * @opcpuid sse
1720 * @optest ->
1721 */
1722 return IEMOP_RAISE_INVALID_OPCODE();
1723}
1724
1725
1726/**
1727 * @opmnemonic udf30f13
1728 * @opcode 0x13
1729 * @oppfx 0xf3
1730 * @opunused intel-modrm
1731 * @opcpuid sse
1732 * @optest ->
1733 * @opdone
1734 */
1735
1736/**
1737 * @opmnemonic udf20f13
1738 * @opcode 0x13
1739 * @oppfx 0xf2
1740 * @opunused intel-modrm
1741 * @opcpuid sse
1742 * @optest ->
1743 * @opdone
1744 */
1745
1746/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1747FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1748/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1749FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1750
1751/**
1752 * @opdone
1753 * @opmnemonic udf30f14
1754 * @opcode 0x14
1755 * @oppfx 0xf3
1756 * @opunused intel-modrm
1757 * @opcpuid sse
1758 * @optest ->
1759 * @opdone
1760 */
1761
1762/**
1763 * @opmnemonic udf20f14
1764 * @opcode 0x14
1765 * @oppfx 0xf2
1766 * @opunused intel-modrm
1767 * @opcpuid sse
1768 * @optest ->
1769 * @opdone
1770 */
1771
1772/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1773FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1774/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1775FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1776/* Opcode 0xf3 0x0f 0x15 - invalid */
1777/* Opcode 0xf2 0x0f 0x15 - invalid */
1778
1779/**
1780 * @opdone
1781 * @opmnemonic udf30f15
1782 * @opcode 0x15
1783 * @oppfx 0xf3
1784 * @opunused intel-modrm
1785 * @opcpuid sse
1786 * @optest ->
1787 * @opdone
1788 */
1789
1790/**
1791 * @opmnemonic udf20f15
1792 * @opcode 0x15
1793 * @oppfx 0xf2
1794 * @opunused intel-modrm
1795 * @opcpuid sse
1796 * @optest ->
1797 * @opdone
1798 */
1799
1800FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1801{
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1804 {
1805 /**
1806 * @opcode 0x16
1807 * @opcodesub 11 mr/reg
1808 * @oppfx none
1809 * @opcpuid sse
1810 * @opgroup og_sse_simdfp_datamove
1811 * @opxcpttype 5
1812 * @optest op1=1 op2=2 -> op1=2
1813 * @optest op1=0 op2=-42 -> op1=-42
1814 */
1815 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1816
1817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1818 IEM_MC_BEGIN(0, 1);
1819 IEM_MC_LOCAL(uint64_t, uSrc);
1820
1821 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1822 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1823 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1824 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1825
1826 IEM_MC_ADVANCE_RIP();
1827 IEM_MC_END();
1828 }
1829 else
1830 {
1831 /**
1832 * @opdone
1833 * @opcode 0x16
1834 * @opcodesub !11 mr/reg
1835 * @oppfx none
1836 * @opcpuid sse
1837 * @opgroup og_sse_simdfp_datamove
1838 * @opxcpttype 5
1839 * @optest op1=1 op2=2 -> op1=2
1840 * @optest op1=0 op2=-42 -> op1=-42
1841 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1842 */
1843 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1844
1845 IEM_MC_BEGIN(0, 2);
1846 IEM_MC_LOCAL(uint64_t, uSrc);
1847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1848
1849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1851 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1852 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1853
1854 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1855 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1856
1857 IEM_MC_ADVANCE_RIP();
1858 IEM_MC_END();
1859 }
1860 return VINF_SUCCESS;
1861}
1862
1863
1864/**
1865 * @opcode 0x16
1866 * @opcodesub !11 mr/reg
1867 * @oppfx 0x66
1868 * @opcpuid sse2
1869 * @opgroup og_sse2_pcksclr_datamove
1870 * @opxcpttype 5
1871 * @optest op1=1 op2=2 -> op1=2
1872 * @optest op1=0 op2=-42 -> op1=-42
1873 */
1874FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1875{
1876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1877 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1878 {
1879 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1880 IEM_MC_BEGIN(0, 2);
1881 IEM_MC_LOCAL(uint64_t, uSrc);
1882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1883
1884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1886 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1887 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1888
1889 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1890 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1891
1892 IEM_MC_ADVANCE_RIP();
1893 IEM_MC_END();
1894 return VINF_SUCCESS;
1895 }
1896
1897 /**
1898 * @opdone
1899 * @opmnemonic ud660f16m3
1900 * @opcode 0x16
1901 * @opcodesub 11 mr/reg
1902 * @oppfx 0x66
1903 * @opunused immediate
1904 * @opcpuid sse
1905 * @optest ->
1906 */
1907 return IEMOP_RAISE_INVALID_OPCODE();
1908}
1909
1910
1911/**
1912 * @opcode 0x16
1913 * @oppfx 0xf3
1914 * @opcpuid sse3
1915 * @opgroup og_sse3_pcksclr_datamove
1916 * @opxcpttype 4
1917 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1918 * op1=0x00000002000000020000000100000001
1919 */
1920FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1921{
1922 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1925 {
1926 /*
1927 * Register, register.
1928 */
1929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1930 IEM_MC_BEGIN(2, 0);
1931 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1932 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1933
1934 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1935 IEM_MC_PREPARE_SSE_USAGE();
1936
1937 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1938 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1939 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1940
1941 IEM_MC_ADVANCE_RIP();
1942 IEM_MC_END();
1943 }
1944 else
1945 {
1946 /*
1947 * Register, memory.
1948 */
1949 IEM_MC_BEGIN(2, 2);
1950 IEM_MC_LOCAL(RTUINT128U, uSrc);
1951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1952 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1953 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1954
1955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1957 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1958 IEM_MC_PREPARE_SSE_USAGE();
1959
1960 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1961 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1962 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1963
1964 IEM_MC_ADVANCE_RIP();
1965 IEM_MC_END();
1966 }
1967 return VINF_SUCCESS;
1968}
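
/*
 * Illustrative sketch (not part of the emulator): movshdup duplicates the
 * high (odd-indexed) dword of each qword pair, which is what the @optest
 * above encodes: {e,1,d,2} -> {1,1,2,2} with dword 0 least significant.
 * The XMMREGREF/movshdupRef names are made up for illustration.
 */
#if 0 /* reference illustration only, not built */
# include <stdint.h>
typedef struct { uint32_t au32[4]; } XMMREGREF;

static void movshdupRef(XMMREGREF *pDst, XMMREGREF const *pSrc)
{
    pDst->au32[0] = pSrc->au32[1];
    pDst->au32[1] = pSrc->au32[1];
    pDst->au32[2] = pSrc->au32[3];
    pDst->au32[3] = pSrc->au32[3];
}
#endif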
1969
1970/**
1971 * @opdone
1972 * @opmnemonic udf20f16
1973 * @opcode 0x16
1974 * @oppfx 0xf2
1975 * @opunused intel-modrm
1976 * @opcpuid sse
1977 * @optest ->
1978 * @opdone
1979 */
1980
1981
1982/**
1983 * @opcode 0x17
1984 * @opcodesub !11 mr/reg
1985 * @oppfx none
1986 * @opcpuid sse
1987 * @opgroup og_sse_simdfp_datamove
1988 * @opxcpttype 5
1989 * @optest op1=1 op2=2 -> op1=2
1990 * @optest op1=0 op2=-42 -> op1=-42
1991 */
1992FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1993{
1994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1995 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1996 {
1997 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1998
1999 IEM_MC_BEGIN(0, 2);
2000 IEM_MC_LOCAL(uint64_t, uSrc);
2001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2002
2003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2007
2008 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 return VINF_SUCCESS;
2014 }
2015
2016 /**
2017 * @opdone
2018 * @opmnemonic ud0f17m3
2019 * @opcode 0x17
2020 * @opcodesub 11 mr/reg
2021 * @oppfx none
2022 * @opunused immediate
2023 * @opcpuid sse
2024 * @optest ->
2025 */
2026 return IEMOP_RAISE_INVALID_OPCODE();
2027}
2028
2029
2030/**
2031 * @opcode 0x17
2032 * @opcodesub !11 mr/reg
2033 * @oppfx 0x66
2034 * @opcpuid sse2
2035 * @opgroup og_sse2_pcksclr_datamove
2036 * @opxcpttype 5
2037 * @optest op1=1 op2=2 -> op1=2
2038 * @optest op1=0 op2=-42 -> op1=-42
2039 */
2040FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2041{
2042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2043 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2044 {
2045 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2046
2047 IEM_MC_BEGIN(0, 2);
2048 IEM_MC_LOCAL(uint64_t, uSrc);
2049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2050
2051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2055
2056 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2057 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2058
2059 IEM_MC_ADVANCE_RIP();
2060 IEM_MC_END();
2061 return VINF_SUCCESS;
2062 }
2063
2064 /**
2065 * @opdone
2066 * @opmnemonic ud660f17m3
2067 * @opcode 0x17
2068 * @opcodesub 11 mr/reg
2069 * @oppfx 0x66
2070 * @opunused immediate
2071 * @opcpuid sse
2072 * @optest ->
2073 */
2074 return IEMOP_RAISE_INVALID_OPCODE();
2075}
2076
2077
2078/**
2079 * @opdone
2080 * @opmnemonic udf30f17
2081 * @opcode 0x17
2082 * @oppfx 0xf3
2083 * @opunused intel-modrm
2084 * @opcpuid sse
2085 * @optest ->
2086 * @opdone
2087 */
2088
2089/**
2090 * @opmnemonic udf20f17
2091 * @opcode 0x17
2092 * @oppfx 0xf2
2093 * @opunused intel-modrm
2094 * @opcpuid sse
2095 * @optest ->
2096 * @opdone
2097 */
2098
2099
2100/** Opcode 0x0f 0x18. */
2101FNIEMOP_DEF(iemOp_prefetch_Grp16)
2102{
2103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2104 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2105 {
2106 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2107 {
2108 case 4: /* Aliased to /0 for the time being according to AMD. */
2109 case 5: /* Aliased to /0 for the time being according to AMD. */
2110 case 6: /* Aliased to /0 for the time being according to AMD. */
2111 case 7: /* Aliased to /0 for the time being according to AMD. */
2112 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2113 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2114 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2115 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2117 }
2118
2119 IEM_MC_BEGIN(0, 1);
2120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2123 /* Currently a NOP. */
2124 NOREF(GCPtrEffSrc);
2125 IEM_MC_ADVANCE_RIP();
2126 IEM_MC_END();
2127 return VINF_SUCCESS;
2128 }
2129
2130 return IEMOP_RAISE_INVALID_OPCODE();
2131}
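
/*
 * Illustrative sketch (not part of the emulator): how the ModR/M reg field
 * maps to the prefetch hint decoded above; /4../7 currently alias /0
 * (prefetchnta) according to AMD.  prefetchHintNameRef is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
static const char *prefetchHintNameRef(unsigned uRegField)
{
    switch (uRegField & 7)
    {
        case 1:  return "prefetcht0";
        case 2:  return "prefetcht1";
        case 3:  return "prefetcht2";
        default: return "prefetchnta"; /* /0 and the /4../7 aliases */
    }
}
#endif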
2132
2133
2134/** Opcode 0x0f 0x19..0x1f. */
2135FNIEMOP_DEF(iemOp_nop_Ev)
2136{
2137 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2139 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2140 {
2141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2142 IEM_MC_BEGIN(0, 0);
2143 IEM_MC_ADVANCE_RIP();
2144 IEM_MC_END();
2145 }
2146 else
2147 {
2148 IEM_MC_BEGIN(0, 1);
2149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2152 /* Currently a NOP. */
2153 NOREF(GCPtrEffSrc);
2154 IEM_MC_ADVANCE_RIP();
2155 IEM_MC_END();
2156 }
2157 return VINF_SUCCESS;
2158}
2159
2160
2161/** Opcode 0x0f 0x20. */
2162FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2163{
2164 /* mod is ignored, as are operand size overrides. */
2165 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2166 IEMOP_HLP_MIN_386();
2167 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2168 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2169 else
2170 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2171
2172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2173 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2175 {
2176 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2177 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2178 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2179 iCrReg |= 8;
2180 }
2181 switch (iCrReg)
2182 {
2183 case 0: case 2: case 3: case 4: case 8:
2184 break;
2185 default:
2186 return IEMOP_RAISE_INVALID_OPCODE();
2187 }
2188 IEMOP_HLP_DONE_DECODING();
2189
2190 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2191}
2192
2193
2194/** Opcode 0x0f 0x21. */
2195FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2196{
2197 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2198 IEMOP_HLP_MIN_386();
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2201 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2202 return IEMOP_RAISE_INVALID_OPCODE();
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2204 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2205 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2206}
2207
2208
2209/** Opcode 0x0f 0x22. */
2210FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2211{
2212 /* mod is ignored, as are operand size overrides. */
2213 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2214 IEMOP_HLP_MIN_386();
2215 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2216 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2217 else
2218 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2219
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2222 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2223 {
2224 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2225 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2226 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2227 iCrReg |= 8;
2228 }
2229 switch (iCrReg)
2230 {
2231 case 0: case 2: case 3: case 4: case 8:
2232 break;
2233 default:
2234 return IEMOP_RAISE_INVALID_OPCODE();
2235 }
2236 IEMOP_HLP_DONE_DECODING();
2237
2238 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2239}
2240
2241
2242/** Opcode 0x0f 0x23. */
2243FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2244{
2245 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2246 IEMOP_HLP_MIN_386();
2247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2249 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2250 return IEMOP_RAISE_INVALID_OPCODE();
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2252 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2253 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2254}
2255
2256
2257/** Opcode 0x0f 0x24. */
2258FNIEMOP_DEF(iemOp_mov_Rd_Td)
2259{
2260 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2261 /** @todo works on 386 and 486. */
2262 /* The RM byte is not considered, see testcase. */
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264}
2265
2266
2267/** Opcode 0x0f 0x26. */
2268FNIEMOP_DEF(iemOp_mov_Td_Rd)
2269{
2270 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2271 /** @todo works on 386 and 486. */
2272 /* The RM byte is not considered, see testcase. */
2273 return IEMOP_RAISE_INVALID_OPCODE();
2274}
2275
2276
2277/**
2278 * @opcode 0x28
2279 * @oppfx none
2280 * @opcpuid sse
2281 * @opgroup og_sse_simdfp_datamove
2282 * @opxcpttype 1
2283 * @optest op1=1 op2=2 -> op1=2
2284 * @optest op1=0 op2=-42 -> op1=-42
2285 */
2286FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2287{
2288 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2291 {
2292 /*
2293 * Register, register.
2294 */
2295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2296 IEM_MC_BEGIN(0, 0);
2297 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2299 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2300 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(0, 2);
2310 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2312
2313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2315 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2316 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2317
2318 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2320
2321 IEM_MC_ADVANCE_RIP();
2322 IEM_MC_END();
2323 }
2324 return VINF_SUCCESS;
2325}
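
/*
 * Illustrative sketch (not part of the emulator): the rule behind
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE above - movaps/movapd memory operands must
 * be 16-byte aligned, otherwise the access raises #GP(0).  movapsAddrOkRef
 * is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdbool.h>
# include <stdint.h>
static bool movapsAddrOkRef(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* misaligned 16-byte access => #GP(0) */
}
#endif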
2326
2327/**
2328 * @opcode 0x28
2329 * @oppfx 66
2330 * @opcpuid sse2
2331 * @opgroup og_sse2_pcksclr_datamove
2332 * @opxcpttype 1
2333 * @optest op1=1 op2=2 -> op1=2
2334 * @optest op1=0 op2=-42 -> op1=-42
2335 */
2336FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2337{
2338 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2340 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2341 {
2342 /*
2343 * Register, register.
2344 */
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2346 IEM_MC_BEGIN(0, 0);
2347 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2349 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2350 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2351 IEM_MC_ADVANCE_RIP();
2352 IEM_MC_END();
2353 }
2354 else
2355 {
2356 /*
2357 * Register, memory.
2358 */
2359 IEM_MC_BEGIN(0, 2);
2360 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2362
2363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2365 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2367
2368 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2369 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2370
2371 IEM_MC_ADVANCE_RIP();
2372 IEM_MC_END();
2373 }
2374 return VINF_SUCCESS;
2375}
2376
2377/* Opcode 0xf3 0x0f 0x28 - invalid */
2378/* Opcode 0xf2 0x0f 0x28 - invalid */
2379
2380/**
2381 * @opcode 0x29
2382 * @oppfx none
2383 * @opcpuid sse
2384 * @opgroup og_sse_simdfp_datamove
2385 * @opxcpttype 1
2386 * @optest op1=1 op2=2 -> op1=2
2387 * @optest op1=0 op2=-42 -> op1=-42
2388 */
2389FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2390{
2391 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2394 {
2395 /*
2396 * Register, register.
2397 */
2398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2399 IEM_MC_BEGIN(0, 0);
2400 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2402 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2403 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2404 IEM_MC_ADVANCE_RIP();
2405 IEM_MC_END();
2406 }
2407 else
2408 {
2409 /*
2410 * Memory, register.
2411 */
2412 IEM_MC_BEGIN(0, 2);
2413 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2420
2421 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2422 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430/**
2431 * @opcode 0x29
2432 * @oppfx 66
2433 * @opcpuid sse2
2434 * @opgroup og_sse2_pcksclr_datamove
2435 * @opxcpttype 1
2436 * @optest op1=1 op2=2 -> op1=2
2437 * @optest op1=0 op2=-42 -> op1=-42
2438 */
2439FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2440{
2441 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2444 {
2445 /*
2446 * Register, register.
2447 */
2448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2449 IEM_MC_BEGIN(0, 0);
2450 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2452 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2453 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2454 IEM_MC_ADVANCE_RIP();
2455 IEM_MC_END();
2456 }
2457 else
2458 {
2459 /*
2460 * Memory, register.
2461 */
2462 IEM_MC_BEGIN(0, 2);
2463 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2465
2466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2468 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2470
2471 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2472 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2473
2474 IEM_MC_ADVANCE_RIP();
2475 IEM_MC_END();
2476 }
2477 return VINF_SUCCESS;
2478}
2479
2480/* Opcode 0xf3 0x0f 0x29 - invalid */
2481/* Opcode 0xf2 0x0f 0x29 - invalid */
2482
2483
2484/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2485FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2486/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2487FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2488/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2489FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2490/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2491FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2492
2493
2494/**
2495 * @opcode 0x2b
2496 * @opcodesub !11 mr/reg
2497 * @oppfx none
2498 * @opcpuid sse
2499 * @opgroup og_sse1_cachect
2500 * @opxcpttype 1
2501 * @optest op1=1 op2=2 -> op1=2
2502 * @optest op1=0 op2=-42 -> op1=-42
2503 */
2504FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2505{
2506 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2508 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2509 {
2510 /*
2511 * memory, register.
2512 */
2513 IEM_MC_BEGIN(0, 2);
2514 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2516
2517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2519 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2520 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2521
2522 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2523 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2524
2525 IEM_MC_ADVANCE_RIP();
2526 IEM_MC_END();
2527 }
2528 /* The register, register encoding is invalid. */
2529 else
2530 return IEMOP_RAISE_INVALID_OPCODE();
2531 return VINF_SUCCESS;
2532}
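
/*
 * Illustrative sketch (not part of the emulator): what guest code typically
 * compiles movntps from; the store bypasses the cache hierarchy and, like
 * the emulation above, requires a 16-byte aligned destination.  Assumes the
 * SSE intrinsics from xmmintrin.h; movntpsUserRef is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <xmmintrin.h>
static void movntpsUserRef(float *pfDst /* 16-byte aligned */, __m128 vVal)
{
    _mm_stream_ps(pfDst, vVal); /* movntps [pfDst], xmm */
    _mm_sfence();               /* order the non-temporal store */
}
#endif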
2533
2534/**
2535 * @opcode 0x2b
2536 * @opcodesub !11 mr/reg
2537 * @oppfx 0x66
2538 * @opcpuid sse2
2539 * @opgroup og_sse2_cachect
2540 * @opxcpttype 1
2541 * @optest op1=1 op2=2 -> op1=2
2542 * @optest op1=0 op2=-42 -> op1=-42
2543 */
2544FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2545{
2546 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2548 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * memory, register.
2552 */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2556
2557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2561
2562 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2563 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2564
2565 IEM_MC_ADVANCE_RIP();
2566 IEM_MC_END();
2567 }
2568 /* The register, register encoding is invalid. */
2569 else
2570 return IEMOP_RAISE_INVALID_OPCODE();
2571 return VINF_SUCCESS;
2572}
2573/* Opcode 0xf3 0x0f 0x2b - invalid */
2574/* Opcode 0xf2 0x0f 0x2b - invalid */
2575
2576
2577/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2578FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2579/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2580FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2581/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2582FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2583/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2584FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2585
2586/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2587FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2588/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2589FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2590/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2591FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2592/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2593FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2594
2595/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2596FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2597/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2598FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2599/* Opcode 0xf3 0x0f 0x2e - invalid */
2600/* Opcode 0xf2 0x0f 0x2e - invalid */
2601
2602/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2603FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2604/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2605FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2606/* Opcode 0xf3 0x0f 0x2f - invalid */
2607/* Opcode 0xf2 0x0f 0x2f - invalid */
2608
2609/** Opcode 0x0f 0x30. */
2610FNIEMOP_DEF(iemOp_wrmsr)
2611{
2612 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2614 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2615}
2616
2617
2618/** Opcode 0x0f 0x31. */
2619FNIEMOP_DEF(iemOp_rdtsc)
2620{
2621 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2623 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2624}
2625
2626
2627/** Opcode 0x0f 0x32. */
2628FNIEMOP_DEF(iemOp_rdmsr)
2629{
2630 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2632 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2633}
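
/*
 * Illustrative sketch (not part of the emulator): the register convention
 * behind iemCImpl_rdmsr/iemCImpl_wrmsr - ECX selects the MSR and EDX:EAX
 * carry the 64-bit value.  rdmsrSplitRef is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdint.h>
static void rdmsrSplitRef(uint64_t uMsrValue, uint32_t *puEax, uint32_t *puEdx)
{
    *puEax = (uint32_t)uMsrValue;         /* low dword  -> EAX */
    *puEdx = (uint32_t)(uMsrValue >> 32); /* high dword -> EDX */
}
#endif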
2634
2635
2636/** Opcode 0x0f 0x33. */
2637FNIEMOP_DEF(iemOp_rdpmc)
2638{
2639 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2641 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2642}
2643
2644
2645/** Opcode 0x0f 0x34. */
2646FNIEMOP_STUB(iemOp_sysenter);
2647/** Opcode 0x0f 0x35. */
2648FNIEMOP_STUB(iemOp_sysexit);
2649/** Opcode 0x0f 0x37. */
2650FNIEMOP_STUB(iemOp_getsec);
2651
2652
2653/** Opcode 0x0f 0x38. */
2654FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2655{
2656#ifdef IEM_WITH_THREE_0F_38
2657 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2658 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2659#else
2660 IEMOP_BITCH_ABOUT_STUB();
2661 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2662#endif
2663}
2664
2665
2666/** Opcode 0x0f 0x3a. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2668{
2669#ifdef IEM_WITH_THREE_0F_3A
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
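
/*
 * Illustrative sketch (not part of the emulator): the indexing scheme the
 * two escape handlers above rely on - four table entries per opcode byte,
 * one per mandatory prefix (none, 0x66, 0xf3, 0xf2), selected via idxPrefix.
 * threeByteTableIndexRef is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
static unsigned threeByteTableIndexRef(unsigned bOpcode, unsigned idxPrefix /* 0..3 */)
{
    return bOpcode * 4 + idxPrefix;
}
#endif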
2677
2678
2679/**
2680 * Implements a conditional move.
2681 *
2682 * Wish there was an obvious way to do this where we could share and reduce
2683 * code bloat.
2684 *
2685 * @param a_Cnd The conditional "microcode" operation.
2686 */
2687#define CMOV_X(a_Cnd) \
2688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2690 { \
2691 switch (pVCpu->iem.s.enmEffOpSize) \
2692 { \
2693 case IEMMODE_16BIT: \
2694 IEM_MC_BEGIN(0, 1); \
2695 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2696 a_Cnd { \
2697 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2698 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2699 } IEM_MC_ENDIF(); \
2700 IEM_MC_ADVANCE_RIP(); \
2701 IEM_MC_END(); \
2702 return VINF_SUCCESS; \
2703 \
2704 case IEMMODE_32BIT: \
2705 IEM_MC_BEGIN(0, 1); \
2706 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2707 a_Cnd { \
2708 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2709 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2710 } IEM_MC_ELSE() { \
2711 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_64BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2723 } IEM_MC_ENDIF(); \
2724 IEM_MC_ADVANCE_RIP(); \
2725 IEM_MC_END(); \
2726 return VINF_SUCCESS; \
2727 \
2728 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2729 } \
2730 } \
2731 else \
2732 { \
2733 switch (pVCpu->iem.s.enmEffOpSize) \
2734 { \
2735 case IEMMODE_16BIT: \
2736 IEM_MC_BEGIN(0, 2); \
2737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2738 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2740 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2741 a_Cnd { \
2742 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2743 } IEM_MC_ENDIF(); \
2744 IEM_MC_ADVANCE_RIP(); \
2745 IEM_MC_END(); \
2746 return VINF_SUCCESS; \
2747 \
2748 case IEMMODE_32BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2756 } IEM_MC_ELSE() { \
2757 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2758 } IEM_MC_ENDIF(); \
2759 IEM_MC_ADVANCE_RIP(); \
2760 IEM_MC_END(); \
2761 return VINF_SUCCESS; \
2762 \
2763 case IEMMODE_64BIT: \
2764 IEM_MC_BEGIN(0, 2); \
2765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2766 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2768 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2769 a_Cnd { \
2770 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2777 } \
2778 } do {} while (0)
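
/*
 * Illustrative sketch (not part of the emulator): the architectural
 * behaviour CMOV_X encodes.  Note the 32-bit subtlety: in 64-bit mode a
 * 32-bit cmov zeroes the upper half of the destination even when the
 * condition is false, which is why only the IEMMODE_32BIT paths above need
 * an IEM_MC_ELSE() branch.  cmov32Ref is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdbool.h>
# include <stdint.h>
static uint64_t cmov32Ref(bool fCondition, uint64_t uDst64, uint32_t uSrc32)
{
    /* Either way the 32-bit result is zero-extended to 64 bits. */
    return fCondition ? uSrc32 : (uint32_t)uDst64;
}
#endif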
2779
2780
2781
2782/** Opcode 0x0f 0x40. */
2783FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2784{
2785 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2786 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2787}
2788
2789
2790/** Opcode 0x0f 0x41. */
2791FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2792{
2793 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2794 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2795}
2796
2797
2798/** Opcode 0x0f 0x42. */
2799FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2800{
2801 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2802 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2803}
2804
2805
2806/** Opcode 0x0f 0x43. */
2807FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2808{
2809 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2810 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2811}
2812
2813
2814/** Opcode 0x0f 0x44. */
2815FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2816{
2817 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2818 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2819}
2820
2821
2822/** Opcode 0x0f 0x45. */
2823FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2824{
2825 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2826 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2827}
2828
2829
2830/** Opcode 0x0f 0x46. */
2831FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2832{
2833 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2834 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2835}
2836
2837
2838/** Opcode 0x0f 0x47. */
2839FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2840{
2841 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2842 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2843}
2844
2845
2846/** Opcode 0x0f 0x48. */
2847FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2848{
2849 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2850 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2851}
2852
2853
2854/** Opcode 0x0f 0x49. */
2855FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2856{
2857 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2858 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2859}
2860
2861
2862/** Opcode 0x0f 0x4a. */
2863FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2864{
2865 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2866 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2867}
2868
2869
2870/** Opcode 0x0f 0x4b. */
2871FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2872{
2873 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2874 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2875}
2876
2877
2878/** Opcode 0x0f 0x4c. */
2879FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2880{
2881 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2882 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2883}
2884
2885
2886/** Opcode 0x0f 0x4d. */
2887FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2888{
2889 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2890 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2891}
2892
2893
2894/** Opcode 0x0f 0x4e. */
2895FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2896{
2897 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2898 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2899}
2900
2901
2902/** Opcode 0x0f 0x4f. */
2903FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2904{
2905 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2906 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2907}
2908
2909#undef CMOV_X
2910
2911/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2912FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2913/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2914FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2915/* Opcode 0xf3 0x0f 0x50 - invalid */
2916/* Opcode 0xf2 0x0f 0x50 - invalid */
2917
2918/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2919FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2920/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2921FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2922/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2923FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2924/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2925FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2926
2927/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2928FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2929/* Opcode 0x66 0x0f 0x52 - invalid */
2930/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2931FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2932/* Opcode 0xf2 0x0f 0x52 - invalid */
2933
2934/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2935FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2936/* Opcode 0x66 0x0f 0x53 - invalid */
2937/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2938FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2939/* Opcode 0xf2 0x0f 0x53 - invalid */
2940
2941/** Opcode 0x0f 0x54 - andps Vps, Wps */
2942FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2943/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2944FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2945/* Opcode 0xf3 0x0f 0x54 - invalid */
2946/* Opcode 0xf2 0x0f 0x54 - invalid */
2947
2948/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2949FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2950/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2951FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2952/* Opcode 0xf3 0x0f 0x55 - invalid */
2953/* Opcode 0xf2 0x0f 0x55 - invalid */
2954
2955/** Opcode 0x0f 0x56 - orps Vps, Wps */
2956FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2957/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2958FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2959/* Opcode 0xf3 0x0f 0x56 - invalid */
2960/* Opcode 0xf2 0x0f 0x56 - invalid */
2961
2962/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2963FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2964/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2965FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2966/* Opcode 0xf3 0x0f 0x57 - invalid */
2967/* Opcode 0xf2 0x0f 0x57 - invalid */
2968
2969/** Opcode 0x0f 0x58 - addps Vps, Wps */
2970FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2971/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2972FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2973/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2974FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2975/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2976FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2977
2978/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2979FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2980/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2981FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2982/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2983FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2984/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2985FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2986
2987/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2988FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2989/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2990FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2991/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2992FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2993/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2994FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2995
2996/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2997FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2998/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2999FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3000/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3001FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3002/* Opcode 0xf2 0x0f 0x5b - invalid */
3003
3004/** Opcode 0x0f 0x5c - subps Vps, Wps */
3005FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3006/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3007FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3008/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3009FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3010/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3011FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3012
3013/** Opcode 0x0f 0x5d - minps Vps, Wps */
3014FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3015/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3016FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3017/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3018FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3019/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3020FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3021
3022/** Opcode 0x0f 0x5e - divps Vps, Wps */
3023FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3024/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3025FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3026/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3027FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3028/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3029FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3030
3031/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3032FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3033/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3034FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3035/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3036FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3037/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3038FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3039
3040/**
3041 * Common worker for SSE2 instructions on the forms:
3042 *      pxxxx xmm1, xmm2/mem128
3043 *
3044 * The 2nd operand is the first half of a register, which in the memory case
3045 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3046 * memory accessed for SSE.
3047 *
3048 * Exceptions type 4.
3049 */
3050FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3051{
3052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3054 {
3055 /*
3056 * Register, register.
3057 */
3058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059 IEM_MC_BEGIN(2, 0);
3060 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3061 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3062 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3063 IEM_MC_PREPARE_SSE_USAGE();
3064 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3065 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3066 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3067 IEM_MC_ADVANCE_RIP();
3068 IEM_MC_END();
3069 }
3070 else
3071 {
3072 /*
3073 * Register, memory.
3074 */
3075 IEM_MC_BEGIN(2, 2);
3076 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3077 IEM_MC_LOCAL(uint64_t, uSrc);
3078 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3080
3081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3085
3086 IEM_MC_PREPARE_SSE_USAGE();
3087 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3088 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3089
3090 IEM_MC_ADVANCE_RIP();
3091 IEM_MC_END();
3092 }
3093 return VINF_SUCCESS;
3094}
3095
3096
3097/**
3098 * Common worker for MMX instructions on the forms:
3099 *      pxxxx mm1, mm2/mem32
3100 *
3101 * The 2nd operand is the first half of a register, which in the memory case
3102 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
3103 * memory accessed for SSE.
3104 *
3105 * Exceptions type 4.
3106 */
3107FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3108{
3109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3110 if (!pImpl->pfnU64)
3111 return IEMOP_RAISE_INVALID_OPCODE();
3112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3113 {
3114 /*
3115 * Register, register.
3116 */
3117 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3118 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3120 IEM_MC_BEGIN(2, 0);
3121 IEM_MC_ARG(uint64_t *, pDst, 0);
3122 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3123 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3124 IEM_MC_PREPARE_FPU_USAGE();
3125 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3126 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3127 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3128 IEM_MC_ADVANCE_RIP();
3129 IEM_MC_END();
3130 }
3131 else
3132 {
3133 /*
3134 * Register, memory.
3135 */
3136 IEM_MC_BEGIN(2, 2);
3137 IEM_MC_ARG(uint64_t *, pDst, 0);
3138 IEM_MC_LOCAL(uint32_t, uSrc);
3139 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3141
3142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3144 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3145 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3146
3147 IEM_MC_PREPARE_FPU_USAGE();
3148 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3149 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3150
3151 IEM_MC_ADVANCE_RIP();
3152 IEM_MC_END();
3153 }
3154 return VINF_SUCCESS;
3155}
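
/*
 * Illustrative sketch (not part of the emulator): the low-half interleave
 * these workers dispatch to, shown for punpcklbw at MMX (64-bit) width -
 * result bytes alternate dst0,src0,dst1,src1,...  punpcklbwRef is a made-up
 * helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdint.h>
static uint64_t punpcklbwRef(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* even result bytes from dst */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes from src  */
    }
    return uResult;
}
#endif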
3156
3157
3158/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3159FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3160{
3161 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3162 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3163}
3164
3165/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3166FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3167{
3168 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3169 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3170}
3171
3172/* Opcode 0xf3 0x0f 0x60 - invalid */
3173
3174
3175/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3176FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3177{
3178 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
3179 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3180}
3181
3182/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3183FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3184{
3185 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3186 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3187}
3188
3189/* Opcode 0xf3 0x0f 0x61 - invalid */
3190
3191
3192/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3193FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3194{
3195 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3196 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3197}
3198
3199/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3200FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3201{
3202 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3203 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3204}
3205
3206/* Opcode 0xf3 0x0f 0x62 - invalid */
3207
3208
3209
3210/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3211FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3212/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3213FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3214/* Opcode 0xf3 0x0f 0x63 - invalid */
3215
3216/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3217FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3218/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3219FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3220/* Opcode 0xf3 0x0f 0x64 - invalid */
3221
3222/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3223FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3224/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3225FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3226/* Opcode 0xf3 0x0f 0x65 - invalid */
3227
3228/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3229FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3230/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3231FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3232/* Opcode 0xf3 0x0f 0x66 - invalid */
3233
3234/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3235FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3236/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3237FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3238/* Opcode 0xf3 0x0f 0x67 - invalid */
3239
3240
3241/**
3242 * Common worker for MMX instructions on the form:
3243 * pxxxx mm1, mm2/mem64
3244 *
3245 * The 2nd operand is the second half of a register, which in the memory case
3246 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3247 * where it may read the full 128 bits or only the upper 64 bits.
3248 *
3249 * Exceptions type 4.
3250 */
3251FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3252{
3253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3254 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3256 {
3257 /*
3258 * Register, register.
3259 */
3260 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3261 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3263 IEM_MC_BEGIN(2, 0);
3264 IEM_MC_ARG(uint64_t *, pDst, 0);
3265 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3267 IEM_MC_PREPARE_FPU_USAGE();
3268 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3269 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3270 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3271 IEM_MC_ADVANCE_RIP();
3272 IEM_MC_END();
3273 }
3274 else
3275 {
3276 /*
3277 * Register, memory.
3278 */
3279 IEM_MC_BEGIN(2, 2);
3280 IEM_MC_ARG(uint64_t *, pDst, 0);
3281 IEM_MC_LOCAL(uint64_t, uSrc);
3282 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3284
3285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3287 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3288 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3289
3290 IEM_MC_PREPARE_FPU_USAGE();
3291 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3292 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3293
3294 IEM_MC_ADVANCE_RIP();
3295 IEM_MC_END();
3296 }
3297 return VINF_SUCCESS;
3298}
3299
3300
3301/**
3302 * Common worker for SSE2 instructions on the form:
3303 * pxxxx xmm1, xmm2/mem128
3304 *
3305 * The 2nd operand is the second half of a register, which in the memory case
3306 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3307 * where it may read the full 128 bits or only the upper 64 bits.
3308 *
3309 * Exceptions type 4.
3310 */
3311FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3312{
3313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3314 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3315 {
3316 /*
3317 * Register, register.
3318 */
3319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3320 IEM_MC_BEGIN(2, 0);
3321 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3322 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3323 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3324 IEM_MC_PREPARE_SSE_USAGE();
3325 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3326 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3327 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3328 IEM_MC_ADVANCE_RIP();
3329 IEM_MC_END();
3330 }
3331 else
3332 {
3333 /*
3334 * Register, memory.
3335 */
3336 IEM_MC_BEGIN(2, 2);
3337 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3338 IEM_MC_LOCAL(RTUINT128U, uSrc);
3339 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3341
3342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3344 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3345 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3346
3347 IEM_MC_PREPARE_SSE_USAGE();
3348 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3349 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3350
3351 IEM_MC_ADVANCE_RIP();
3352 IEM_MC_END();
3353 }
3354 return VINF_SUCCESS;
3355}
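
/*
 * Illustrative sketch (not part of the emulator): the high-half counterpart
 * of the interleave above, shown for punpckhbw at MMX (64-bit) width - the
 * upper four bytes of each operand are interleaved.  punpckhbwRef is a
 * made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdint.h>
static uint64_t punpckhbwRef(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> ((i + 4) * 8)) & 0xff) << (i * 16);     /* even bytes from dst high half */
        uResult |= ((uSrc >> ((i + 4) * 8)) & 0xff) << (i * 16 + 8); /* odd bytes from src high half  */
    }
    return uResult;
}
#endif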
3356
3357
3358/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3359FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3360{
3361 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3362 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3363}
3364
3365/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3366FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3367{
3368 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3369 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3370}
3371/* Opcode 0xf3 0x0f 0x68 - invalid */
3372
3373
3374/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3375FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3376{
3377 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3378 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3379}
3380
3381/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3382FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3383{
3384 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3385 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3386
3387}
3388/* Opcode 0xf3 0x0f 0x69 - invalid */
3389
3390
3391/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3392FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3393{
3394 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3395 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3396}
3397
3398/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3399FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3400{
3401 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3402 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3403}
3404/* Opcode 0xf3 0x0f 0x6a - invalid */
3405
3406
3407/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3408FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3409/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3410FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3411/* Opcode 0xf3 0x0f 0x6b - invalid */
3412
3413
3414/* Opcode 0x0f 0x6c - invalid */
3415
3416/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3417FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3418{
3419 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3420 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3421}
3422
3423/* Opcode 0xf3 0x0f 0x6c - invalid */
3424/* Opcode 0xf2 0x0f 0x6c - invalid */
3425
3426
3427/* Opcode 0x0f 0x6d - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3431{
3432 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6d - invalid */
3437
3438
3439FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3440{
3441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3442 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3443 {
3444 /**
3445 * @opcode 0x6e
3446 * @opcodesub rex.w=1
3447 * @oppfx none
3448 * @opcpuid mmx
3449 * @opgroup og_mmx_datamove
3450 * @opxcpttype 5
3451 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3452 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3453 */
3454 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 {
3457 /* MMX, greg64 */
3458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3459 IEM_MC_BEGIN(0, 1);
3460 IEM_MC_LOCAL(uint64_t, u64Tmp);
3461
3462 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3464
3465 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3466 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3467 IEM_MC_FPU_TO_MMX_MODE();
3468
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 /* MMX, [mem64] */
3475 IEM_MC_BEGIN(0, 2);
3476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478
3479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3482 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3483
3484 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3485 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3486 IEM_MC_FPU_TO_MMX_MODE();
3487
3488 IEM_MC_ADVANCE_RIP();
3489 IEM_MC_END();
3490 }
3491 }
3492 else
3493 {
3494 /**
3495 * @opdone
3496 * @opcode 0x6e
3497 * @opcodesub rex.w=0
3498 * @oppfx none
3499 * @opcpuid mmx
3500 * @opgroup og_mmx_datamove
3501 * @opxcpttype 5
3502 * @opfunction iemOp_movd_q_Pd_Ey
3503 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3504 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3505 */
3506 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3508 {
3509 /* MMX, greg */
3510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3511 IEM_MC_BEGIN(0, 1);
3512 IEM_MC_LOCAL(uint64_t, u64Tmp);
3513
3514 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3515 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3516
3517 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3518 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3519 IEM_MC_FPU_TO_MMX_MODE();
3520
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 else
3525 {
3526 /* MMX, [mem] */
3527 IEM_MC_BEGIN(0, 2);
3528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3529 IEM_MC_LOCAL(uint32_t, u32Tmp);
3530
3531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3534 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3535
3536 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3537 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3538 IEM_MC_FPU_TO_MMX_MODE();
3539
3540 IEM_MC_ADVANCE_RIP();
3541 IEM_MC_END();
3542 }
3543 }
3544 return VINF_SUCCESS;
3545}
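
/*
 * Illustrative sketch (not part of the emulator): the rex.w=0 movd path
 * above zero-extends the 32-bit source into the 64-bit mm register.
 * movdZxRef is a made-up helper.
 */
#if 0 /* reference illustration only, not built */
# include <stdint.h>
static uint64_t movdZxRef(uint32_t uSrc32)
{
    return uSrc32; /* implicit zero extension: mm[63:32] = 0 */
}
#endif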
3546
3547FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3548{
3549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3550 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3551 {
3552 /**
3553 * @opcode 0x6e
3554 * @opcodesub rex.w=1
3555 * @oppfx 0x66
3556 * @opcpuid sse2
3557 * @opgroup og_sse2_simdint_datamove
3558 * @opxcpttype 5
3559 * @optest 64-bit / op1=1 op2=2 -> op1=2
3560 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3561 */
3562 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3564 {
3565 /* XMM, greg64 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(0, 1);
3568 IEM_MC_LOCAL(uint64_t, u64Tmp);
3569
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3572
3573 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3574 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3575
3576 IEM_MC_ADVANCE_RIP();
3577 IEM_MC_END();
3578 }
3579 else
3580 {
3581 /* XMM, [mem64] */
3582 IEM_MC_BEGIN(0, 2);
3583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3584 IEM_MC_LOCAL(uint64_t, u64Tmp);
3585
3586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3590
3591 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3592 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3593
3594 IEM_MC_ADVANCE_RIP();
3595 IEM_MC_END();
3596 }
3597 }
3598 else
3599 {
3600 /**
3601 * @opdone
3602 * @opcode 0x6e
3603 * @opcodesub rex.w=0
3604 * @oppfx 0x66
3605 * @opcpuid sse2
3606 * @opgroup og_sse2_simdint_datamove
3607 * @opxcpttype 5
3608 * @opfunction iemOp_movd_q_Vy_Ey
3609 * @optest op1=1 op2=2 -> op1=2
3610 * @optest op1=0 op2=-42 -> op1=-42
3611 */
3612 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3613 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3614 {
3615 /* XMM, greg32 */
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3617 IEM_MC_BEGIN(0, 1);
3618 IEM_MC_LOCAL(uint32_t, u32Tmp);
3619
3620 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3621 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3622
3623 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3624 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3625
3626 IEM_MC_ADVANCE_RIP();
3627 IEM_MC_END();
3628 }
3629 else
3630 {
3631 /* XMM, [mem32] */
3632 IEM_MC_BEGIN(0, 2);
3633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3634 IEM_MC_LOCAL(uint32_t, u32Tmp);
3635
3636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3639 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3640
3641 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3642 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3643
3644 IEM_MC_ADVANCE_RIP();
3645 IEM_MC_END();
3646 }
3647 }
3648 return VINF_SUCCESS;
3649}
3650
3651/* Opcode 0xf3 0x0f 0x6e - invalid */
3652
3653
3654/**
3655 * @opcode 0x6f
3656 * @oppfx none
3657 * @opcpuid mmx
3658 * @opgroup og_mmx_datamove
3659 * @opxcpttype 5
3660 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3661 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3662 */
3663FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3664{
3665 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3668 {
3669 /*
3670 * Register, register.
3671 */
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3673 IEM_MC_BEGIN(0, 1);
3674 IEM_MC_LOCAL(uint64_t, u64Tmp);
3675
3676 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3677 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3678
3679 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3680 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3681 IEM_MC_FPU_TO_MMX_MODE();
3682
3683 IEM_MC_ADVANCE_RIP();
3684 IEM_MC_END();
3685 }
3686 else
3687 {
3688 /*
3689 * Register, memory.
3690 */
3691 IEM_MC_BEGIN(0, 2);
3692 IEM_MC_LOCAL(uint64_t, u64Tmp);
3693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3694
3695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3697 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3698 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3699
3700 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3701 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3702 IEM_MC_FPU_TO_MMX_MODE();
3703
3704 IEM_MC_ADVANCE_RIP();
3705 IEM_MC_END();
3706 }
3707 return VINF_SUCCESS;
3708}
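
/*
 * The (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) test above
 * recurs throughout this file. A minimal sketch of the field split it relies
 * on; the helper name is made up for illustration and not part of IEM.
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchDecodeModRm(uint8_t bRm, uint8_t uRexReg, uint8_t uRexB)
{
    uint8_t const iMod = (bRm >> X86_MODRM_MOD_SHIFT) & 3;  /* 3 means register operand, anything else memory */
    uint8_t const iReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | uRexReg; /* uRexReg is 0 or 8 */
    uint8_t const iRm  = (bRm & X86_MODRM_RM_MASK) | uRexB;                              /* uRexB   is 0 or 8 */
    (void)iMod; (void)iReg; (void)iRm;
}
#endif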
3709
3710/**
3711 * @opcode 0x6f
3712 * @oppfx 0x66
3713 * @opcpuid sse2
3714 * @opgroup og_sse2_simdint_datamove
3715 * @opxcpttype 1
3716 * @optest op1=1 op2=2 -> op1=2
3717 * @optest op1=0 op2=-42 -> op1=-42
3718 */
3719FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3720{
3721 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3724 {
3725 /*
3726 * Register, register.
3727 */
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3729 IEM_MC_BEGIN(0, 0);
3730
3731 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3732 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3733
3734 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3735 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 }
3739 else
3740 {
3741 /*
3742 * Register, memory.
3743 */
3744 IEM_MC_BEGIN(0, 2);
3745 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3747
3748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3750 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3751 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3752
3753 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3754 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3755
3756 IEM_MC_ADVANCE_RIP();
3757 IEM_MC_END();
3758 }
3759 return VINF_SUCCESS;
3760}
3761
3762/**
3763 * @opcode 0x6f
3764 * @oppfx 0xf3
3765 * @opcpuid sse2
3766 * @opgroup og_sse2_simdint_datamove
3767 * @opxcpttype 4UA
3768 * @optest op1=1 op2=2 -> op1=2
3769 * @optest op1=0 op2=-42 -> op1=-42
3770 */
3771FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3772{
3773 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3775 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3776 {
3777 /*
3778 * Register, register.
3779 */
3780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3781 IEM_MC_BEGIN(0, 0);
3782 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3783 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3784 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3785 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3786 IEM_MC_ADVANCE_RIP();
3787 IEM_MC_END();
3788 }
3789 else
3790 {
3791 /*
3792 * Register, memory.
3793 */
3794 IEM_MC_BEGIN(0, 2);
3795 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3797
3798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3801 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3802 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3803 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3804
3805 IEM_MC_ADVANCE_RIP();
3806 IEM_MC_END();
3807 }
3808 return VINF_SUCCESS;
3809}
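
/*
 * The only substantive difference between the movdqa and movdqu variants
 * above is the aligned fetch: the _ALIGN_SSE accessor faults on a misaligned
 * 16-byte address while the plain movdqu fetch tolerates it. A sketch of the
 * predicate involved; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch, not part of the build */
static bool sketchIsSse16Aligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* movdqa on an unaligned operand raises #GP(0) */
}
#endif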
3810
3811
3812/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3813FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3814{
3815 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3817 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3818 {
3819 /*
3820 * Register, register.
3821 */
3822 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3824
3825 IEM_MC_BEGIN(3, 0);
3826 IEM_MC_ARG(uint64_t *, pDst, 0);
3827 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3828 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3829 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3830 IEM_MC_PREPARE_FPU_USAGE();
3831 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3832 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3833 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3834 IEM_MC_ADVANCE_RIP();
3835 IEM_MC_END();
3836 }
3837 else
3838 {
3839 /*
3840 * Register, memory.
3841 */
3842 IEM_MC_BEGIN(3, 2);
3843 IEM_MC_ARG(uint64_t *, pDst, 0);
3844 IEM_MC_LOCAL(uint64_t, uSrc);
3845 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3847
3848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3849 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3850 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3852 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3853
3854 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3855 IEM_MC_PREPARE_FPU_USAGE();
3856 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3857 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3858
3859 IEM_MC_ADVANCE_RIP();
3860 IEM_MC_END();
3861 }
3862 return VINF_SUCCESS;
3863}
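
/*
 * Reference semantics for the pshufw worker called above: each destination
 * word selects a source word via a 2-bit field of the immediate. Hypothetical
 * helper for illustration, not the real iemAImpl_pshufw signature.
 */
#if 0 /* illustrative sketch, not part of the build */
static uint64_t sketchPshufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uDst = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3;
        uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uDst;
}
#endif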
3864
3865/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3866FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3867{
3868 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3870 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3871 {
3872 /*
3873 * Register, register.
3874 */
3875 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3877
3878 IEM_MC_BEGIN(3, 0);
3879 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3880 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3881 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3882 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3883 IEM_MC_PREPARE_SSE_USAGE();
3884 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3885 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3886 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3887 IEM_MC_ADVANCE_RIP();
3888 IEM_MC_END();
3889 }
3890 else
3891 {
3892 /*
3893 * Register, memory.
3894 */
3895 IEM_MC_BEGIN(3, 2);
3896 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3897 IEM_MC_LOCAL(RTUINT128U, uSrc);
3898 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3900
3901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3902 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3903 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3905 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3906
3907 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3908 IEM_MC_PREPARE_SSE_USAGE();
3909 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3910 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3911
3912 IEM_MC_ADVANCE_RIP();
3913 IEM_MC_END();
3914 }
3915 return VINF_SUCCESS;
3916}
3917
3918/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3919FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3920{
3921 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3924 {
3925 /*
3926 * Register, register.
3927 */
3928 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3930
3931 IEM_MC_BEGIN(3, 0);
3932 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3933 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3934 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3935 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3936 IEM_MC_PREPARE_SSE_USAGE();
3937 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3938 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3939 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3940 IEM_MC_ADVANCE_RIP();
3941 IEM_MC_END();
3942 }
3943 else
3944 {
3945 /*
3946 * Register, memory.
3947 */
3948 IEM_MC_BEGIN(3, 2);
3949 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3950 IEM_MC_LOCAL(RTUINT128U, uSrc);
3951 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3953
3954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3955 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3956 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3958 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3959
3960 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3961 IEM_MC_PREPARE_SSE_USAGE();
3962 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3963 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3964
3965 IEM_MC_ADVANCE_RIP();
3966 IEM_MC_END();
3967 }
3968 return VINF_SUCCESS;
3969}
3970
3971/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3972FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3973{
3974 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3977 {
3978 /*
3979 * Register, register.
3980 */
3981 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3983
3984 IEM_MC_BEGIN(3, 0);
3985 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3986 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3987 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3988 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3989 IEM_MC_PREPARE_SSE_USAGE();
3990 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3991 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3992 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3993 IEM_MC_ADVANCE_RIP();
3994 IEM_MC_END();
3995 }
3996 else
3997 {
3998 /*
3999 * Register, memory.
4000 */
4001 IEM_MC_BEGIN(3, 2);
4002 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4003 IEM_MC_LOCAL(RTUINT128U, uSrc);
4004 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4006
4007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4008 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4009 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4011 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4012
4013 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4014 IEM_MC_PREPARE_SSE_USAGE();
4015 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4016 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4017
4018 IEM_MC_ADVANCE_RIP();
4019 IEM_MC_END();
4020 }
4021 return VINF_SUCCESS;
4022}
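
/*
 * pshuflw shuffles only the four words of the low quadword and passes the
 * high quadword through unchanged; pshufhw is the mirror image. Sketched in
 * terms of the sketchPshufW reference above; names are hypothetical.
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchPshufLw(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    puDst->au64[0] = sketchPshufW(puSrc->au64[0], bImm); /* low qword shuffled */
    puDst->au64[1] = puSrc->au64[1];                     /* high qword copied unchanged */
}
#endif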
4023
4024
4025/** Opcode 0x0f 0x71 11/2. */
4026FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4027
4028/** Opcode 0x66 0x0f 0x71 11/2. */
4029FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4030
4031/** Opcode 0x0f 0x71 11/4. */
4032FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4033
4034/** Opcode 0x66 0x0f 0x71 11/4. */
4035FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4036
4037/** Opcode 0x0f 0x71 11/6. */
4038FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4039
4040/** Opcode 0x66 0x0f 0x71 11/6. */
4041FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4042
4043
4044/**
4045 * Group 12 jump table for register variant.
4046 */
4047IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4048{
4049 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4050 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4051 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4052 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4053 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4054 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4055 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4056 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4057};
4058AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4059
4060
4061/** Opcode 0x0f 0x71. */
4062FNIEMOP_DEF(iemOp_Grp12)
4063{
4064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4065 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4066 /* register, register */
4067 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4068 + pVCpu->iem.s.idxPrefix], bRm);
4069 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4070}
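
/*
 * The group jump tables above hold 8 ModRM.reg rows of 4 entries each, one
 * per mandatory prefix (none, 0x66, 0xf3, 0xf2 in idxPrefix order, going by
 * the table column comments). The dispatch index therefore reduces to the
 * following; the helper name is made up for illustration.
 */
#if 0 /* illustrative sketch, not part of the build */
static unsigned sketchGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}
#endif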
4071
4072
4073/** Opcode 0x0f 0x72 11/2. */
4074FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4075
4076/** Opcode 0x66 0x0f 0x72 11/2. */
4077FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4078
4079/** Opcode 0x0f 0x72 11/4. */
4080FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4081
4082/** Opcode 0x66 0x0f 0x72 11/4. */
4083FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4084
4085/** Opcode 0x0f 0x72 11/6. */
4086FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4087
4088/** Opcode 0x66 0x0f 0x72 11/6. */
4089FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4090
4091
4092/**
4093 * Group 13 jump table for register variant.
4094 */
4095IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4096{
4097 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4098 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4099 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4100 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4101 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4102 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4103 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4104 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4105};
4106AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4107
4108/** Opcode 0x0f 0x72. */
4109FNIEMOP_DEF(iemOp_Grp13)
4110{
4111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4113 /* register, register */
4114 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4115 + pVCpu->iem.s.idxPrefix], bRm);
4116 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4117}
4118
4119
4120/** Opcode 0x0f 0x73 11/2. */
4121FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4122
4123/** Opcode 0x66 0x0f 0x73 11/2. */
4124FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4125
4126/** Opcode 0x66 0x0f 0x73 11/3. */
4127FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4128
4129/** Opcode 0x0f 0x73 11/6. */
4130FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4131
4132/** Opcode 0x66 0x0f 0x73 11/6. */
4133FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4134
4135/** Opcode 0x66 0x0f 0x73 11/7. */
4136FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4137
4138/**
4139 * Group 14 jump table for register variant.
4140 */
4141IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4142{
4143 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4144 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4145 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4146 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4147 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4148 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4149 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4150 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4151};
4152AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4153
4154
4155/** Opcode 0x0f 0x73. */
4156FNIEMOP_DEF(iemOp_Grp14)
4157{
4158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4160 /* register, register */
4161 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4162 + pVCpu->iem.s.idxPrefix], bRm);
4163 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4164}
4165
4166
4167/**
4168 * Common worker for MMX instructions of the form:
4169 * pxxx mm1, mm2/mem64
4170 */
4171FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4172{
4173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4174 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4175 {
4176 /*
4177 * Register, register.
4178 */
4179 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4180 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4182 IEM_MC_BEGIN(2, 0);
4183 IEM_MC_ARG(uint64_t *, pDst, 0);
4184 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4185 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4186 IEM_MC_PREPARE_FPU_USAGE();
4187 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4188 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4189 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4190 IEM_MC_ADVANCE_RIP();
4191 IEM_MC_END();
4192 }
4193 else
4194 {
4195 /*
4196 * Register, memory.
4197 */
4198 IEM_MC_BEGIN(2, 2);
4199 IEM_MC_ARG(uint64_t *, pDst, 0);
4200 IEM_MC_LOCAL(uint64_t, uSrc);
4201 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4203
4204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4206 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4207 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4208
4209 IEM_MC_PREPARE_FPU_USAGE();
4210 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4211 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4212
4213 IEM_MC_ADVANCE_RIP();
4214 IEM_MC_END();
4215 }
4216 return VINF_SUCCESS;
4217}
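
/*
 * Shape of a two-operand worker dispatched through pfnU64 above, using
 * pcmpeqb as the example. This is a plain C reference for illustration only,
 * not the real iemAImpl_* code (those also receive the FXSAVE state as a
 * first argument via the IEM_MC_CALL_MMX_AIMPL_2 macro).
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchPcmpeqbU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if (((*puDst >> (iByte * 8)) & 0xff) == ((*puSrc >> (iByte * 8)) & 0xff))
            uResult |= UINT64_C(0xff) << (iByte * 8); /* equal lanes become all-ones */
    *puDst = uResult;
}
#endif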
4218
4219
4220/**
4221 * Common worker for SSE2 instructions of the form:
4222 * pxxx xmm1, xmm2/mem128
4223 *
4224 * Proper alignment of the 128-bit operand is enforced.
4225 * Exceptions type 4. SSE2 cpuid checks.
4226 */
4227FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4228{
4229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4231 {
4232 /*
4233 * Register, register.
4234 */
4235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4236 IEM_MC_BEGIN(2, 0);
4237 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4238 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4239 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4240 IEM_MC_PREPARE_SSE_USAGE();
4241 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4242 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4243 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4244 IEM_MC_ADVANCE_RIP();
4245 IEM_MC_END();
4246 }
4247 else
4248 {
4249 /*
4250 * Register, memory.
4251 */
4252 IEM_MC_BEGIN(2, 2);
4253 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4254 IEM_MC_LOCAL(RTUINT128U, uSrc);
4255 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4257
4258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4260 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4261 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4262
4263 IEM_MC_PREPARE_SSE_USAGE();
4264 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4265 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4266
4267 IEM_MC_ADVANCE_RIP();
4268 IEM_MC_END();
4269 }
4270 return VINF_SUCCESS;
4271}
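
/*
 * The pfnU128 worker form does the same per-lane work across all 16 bytes.
 * Reference sketch only; the name is hypothetical and the FXSAVE-state
 * argument of the real workers is omitted.
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchPcmpeqbU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        puDst->au8[iByte] = puDst->au8[iByte] == puSrc->au8[iByte] ? 0xff : 0x00;
}
#endif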
4272
4273
4274/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4275FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4276{
4277 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4278 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4279}
4280
4281/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4282FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4283{
4284 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4285 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4286}
4287
4288/* Opcode 0xf3 0x0f 0x74 - invalid */
4289/* Opcode 0xf2 0x0f 0x74 - invalid */
4290
4291
4292/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4293FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4294{
4295 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4296 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4297}
4298
4299/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4300FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4301{
4302 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4303 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4304}
4305
4306/* Opcode 0xf3 0x0f 0x75 - invalid */
4307/* Opcode 0xf2 0x0f 0x75 - invalid */
4308
4309
4310/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4311FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4312{
4313 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4314 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4315}
4316
4317/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4318FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4319{
4320 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4321 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4322}
4323
4324/* Opcode 0xf3 0x0f 0x76 - invalid */
4325/* Opcode 0xf2 0x0f 0x76 - invalid */
4326
4327
4328/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4329FNIEMOP_DEF(iemOp_emms)
4330{
4331 IEMOP_MNEMONIC(emms, "emms");
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333
4334 IEM_MC_BEGIN(0,0);
4335 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4336 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4337 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4338 IEM_MC_FPU_FROM_MMX_MODE();
4339 IEM_MC_ADVANCE_RIP();
4340 IEM_MC_END();
4341 return VINF_SUCCESS;
4342}
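
/*
 * The ftw=0xff expectations in the MMX @optest lines reflect that executing
 * an MMX instruction tags all eight x87 registers valid (and, as part of the
 * same transition, zeroes FSW.TOP), while emms marks them all empty again.
 * Sketch of the effect on the abridged tag byte; both helper names are
 * hypothetical stand-ins for the IEM_MC_FPU_TO/FROM_MMX_MODE statements.
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchFpuToMmxMode(uint8_t *pbFtw)   { *pbFtw = 0xff; /* all registers valid */ }
static void sketchFpuFromMmxMode(uint8_t *pbFtw) { *pbFtw = 0x00; /* all registers empty */ }
#endif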
4343
4344/* Opcode 0x66 0x0f 0x77 - invalid */
4345/* Opcode 0xf3 0x0f 0x77 - invalid */
4346/* Opcode 0xf2 0x0f 0x77 - invalid */
4347
4348/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4349FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4350/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4351FNIEMOP_STUB(iemOp_AmdGrp17);
4352/* Opcode 0xf3 0x0f 0x78 - invalid */
4353/* Opcode 0xf2 0x0f 0x78 - invalid */
4354
4355/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4356FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4357/* Opcode 0x66 0x0f 0x79 - invalid */
4358/* Opcode 0xf3 0x0f 0x79 - invalid */
4359/* Opcode 0xf2 0x0f 0x79 - invalid */
4360
4361/* Opcode 0x0f 0x7a - invalid */
4362/* Opcode 0x66 0x0f 0x7a - invalid */
4363/* Opcode 0xf3 0x0f 0x7a - invalid */
4364/* Opcode 0xf2 0x0f 0x7a - invalid */
4365
4366/* Opcode 0x0f 0x7b - invalid */
4367/* Opcode 0x66 0x0f 0x7b - invalid */
4368/* Opcode 0xf3 0x0f 0x7b - invalid */
4369/* Opcode 0xf2 0x0f 0x7b - invalid */
4370
4371/* Opcode 0x0f 0x7c - invalid */
4372/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4373FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4374/* Opcode 0xf3 0x0f 0x7c - invalid */
4375/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4376FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4377
4378/* Opcode 0x0f 0x7d - invalid */
4379/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4380FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4381/* Opcode 0xf3 0x0f 0x7d - invalid */
4382/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4383FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4384
4385
4386/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4387FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4388{
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4391 {
4392 /**
4393 * @opcode 0x7e
4394 * @opcodesub rex.w=1
4395 * @oppfx none
4396 * @opcpuid mmx
4397 * @opgroup og_mmx_datamove
4398 * @opxcpttype 5
4399 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4400 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4401 */
4402 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4403 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4404 {
4405 /* greg64, MMX */
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407 IEM_MC_BEGIN(0, 1);
4408 IEM_MC_LOCAL(uint64_t, u64Tmp);
4409
4410 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4411 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4412
4413 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4414 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4415 IEM_MC_FPU_TO_MMX_MODE();
4416
4417 IEM_MC_ADVANCE_RIP();
4418 IEM_MC_END();
4419 }
4420 else
4421 {
4422 /* [mem64], MMX */
4423 IEM_MC_BEGIN(0, 2);
4424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4425 IEM_MC_LOCAL(uint64_t, u64Tmp);
4426
4427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4429 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4430 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4431
4432 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4433 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4434 IEM_MC_FPU_TO_MMX_MODE();
4435
4436 IEM_MC_ADVANCE_RIP();
4437 IEM_MC_END();
4438 }
4439 }
4440 else
4441 {
4442 /**
4443 * @opdone
4444 * @opcode 0x7e
4445 * @opcodesub rex.w=0
4446 * @oppfx none
4447 * @opcpuid mmx
4448 * @opgroup og_mmx_datamove
4449 * @opxcpttype 5
4450 * @opfunction iemOp_movd_q_Ey_Pd
4451 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4452 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4453 */
4454 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4456 {
4457 /* greg32, MMX */
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459 IEM_MC_BEGIN(0, 1);
4460 IEM_MC_LOCAL(uint32_t, u32Tmp);
4461
4462 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4463 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4464
4465 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4466 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4467 IEM_MC_FPU_TO_MMX_MODE();
4468
4469 IEM_MC_ADVANCE_RIP();
4470 IEM_MC_END();
4471 }
4472 else
4473 {
4474 /* [mem32], MMX */
4475 IEM_MC_BEGIN(0, 2);
4476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4477 IEM_MC_LOCAL(uint32_t, u32Tmp);
4478
4479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4481 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4482 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4483
4484 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4485 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4486 IEM_MC_FPU_TO_MMX_MODE();
4487
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 }
4492 return VINF_SUCCESS;
4493
4494}
4495
4496
4497FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4498{
4499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4501 {
4502 /**
4503 * @opcode 0x7e
4504 * @opcodesub rex.w=1
4505 * @oppfx 0x66
4506 * @opcpuid sse2
4507 * @opgroup og_sse2_simdint_datamove
4508 * @opxcpttype 5
4509 * @optest 64-bit / op1=1 op2=2 -> op1=2
4510 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4511 */
4512 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4514 {
4515 /* greg64, XMM */
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517 IEM_MC_BEGIN(0, 1);
4518 IEM_MC_LOCAL(uint64_t, u64Tmp);
4519
4520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4522
4523 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4524 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4525
4526 IEM_MC_ADVANCE_RIP();
4527 IEM_MC_END();
4528 }
4529 else
4530 {
4531 /* [mem64], XMM */
4532 IEM_MC_BEGIN(0, 2);
4533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4534 IEM_MC_LOCAL(uint64_t, u64Tmp);
4535
4536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4538 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4539 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4540
4541 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4542 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4543
4544 IEM_MC_ADVANCE_RIP();
4545 IEM_MC_END();
4546 }
4547 }
4548 else
4549 {
4550 /**
4551 * @opdone
4552 * @opcode 0x7e
4553 * @opcodesub rex.w=0
4554 * @oppfx 0x66
4555 * @opcpuid sse2
4556 * @opgroup og_sse2_simdint_datamove
4557 * @opxcpttype 5
4558 * @opfunction iemOp_movd_q_Ey_Vy
4559 * @optest op1=1 op2=2 -> op1=2
4560 * @optest op1=0 op2=-42 -> op1=-42
4561 */
4562 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4564 {
4565 /* greg32, XMM */
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4567 IEM_MC_BEGIN(0, 1);
4568 IEM_MC_LOCAL(uint32_t, u32Tmp);
4569
4570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4572
4573 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4574 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4575
4576 IEM_MC_ADVANCE_RIP();
4577 IEM_MC_END();
4578 }
4579 else
4580 {
4581 /* [mem32], XMM */
4582 IEM_MC_BEGIN(0, 2);
4583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4584 IEM_MC_LOCAL(uint32_t, u32Tmp);
4585
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4590
4591 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4592 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4593
4594 IEM_MC_ADVANCE_RIP();
4595 IEM_MC_END();
4596 }
4597 }
4598 return VINF_SUCCESS;
4599
4600}
4601
4602/**
4603 * @opcode 0x7e
4604 * @oppfx 0xf3
4605 * @opcpuid sse2
4606 * @opgroup og_sse2_pcksclr_datamove
4607 * @opxcpttype none
4608 * @optest op1=1 op2=2 -> op1=2
4609 * @optest op1=0 op2=-42 -> op1=-42
4610 */
4611FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4612{
4613 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4615 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4616 {
4617 /*
4618 * Register, register.
4619 */
4620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4621 IEM_MC_BEGIN(0, 2);
4622 IEM_MC_LOCAL(uint64_t, uSrc);
4623
4624 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4625 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4626
4627 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4628 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4629
4630 IEM_MC_ADVANCE_RIP();
4631 IEM_MC_END();
4632 }
4633 else
4634 {
4635 /*
4636 * Memory, register.
4637 */
4638 IEM_MC_BEGIN(0, 2);
4639 IEM_MC_LOCAL(uint64_t, uSrc);
4640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4641
4642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4644 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4645 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4646
4647 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4648 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4649
4650 IEM_MC_ADVANCE_RIP();
4651 IEM_MC_END();
4652 }
4653 return VINF_SUCCESS;
4654}
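
/*
 * IEM_MC_STORE_XREG_U64_ZX_U128 above writes the low quadword and clears the
 * high one, so this movq form zero-extends into the full XMM register.
 * Equivalent plain C; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch, not part of the build */
static void sketchMovqVqWq(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc;
    puDst->au64[1] = 0; /* zero-extend to 128 bits */
}
#endif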
4655
4656/* Opcode 0xf2 0x0f 0x7e - invalid */
4657
4658
4659/** Opcode 0x0f 0x7f - movq Qq, Pq */
4660FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4661{
4662 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4664 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4665 {
4666 /*
4667 * Register, register.
4668 */
4669 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4670 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_BEGIN(0, 1);
4673 IEM_MC_LOCAL(uint64_t, u64Tmp);
4674 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4675 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4676 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4677 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
IEM_MC_FPU_TO_MMX_MODE(); /* enter MMX mode like the 0x6f load form does */
4678 IEM_MC_ADVANCE_RIP();
4679 IEM_MC_END();
4680 }
4681 else
4682 {
4683 /*
4684 * Register, memory.
4685 */
4686 IEM_MC_BEGIN(0, 2);
4687 IEM_MC_LOCAL(uint64_t, u64Tmp);
4688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4689
4690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4693 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4694
4695 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4696 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
IEM_MC_FPU_TO_MMX_MODE(); /* enter MMX mode like the 0x6f load form does */
4697
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 return VINF_SUCCESS;
4702}
4703
4704/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4705FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4706{
4707 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4710 {
4711 /*
4712 * Register, register.
4713 */
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715 IEM_MC_BEGIN(0, 0);
4716 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4717 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4718 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4719 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4720 IEM_MC_ADVANCE_RIP();
4721 IEM_MC_END();
4722 }
4723 else
4724 {
4725 /*
4726 * Register, memory.
4727 */
4728 IEM_MC_BEGIN(0, 2);
4729 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4731
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4735 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4736
4737 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4738 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4739
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 return VINF_SUCCESS;
4744}
4745
4746/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4747FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4748{
4749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4750 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4752 {
4753 /*
4754 * Register, register.
4755 */
4756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4757 IEM_MC_BEGIN(0, 0);
4758 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4759 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4760 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4761 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4762 IEM_MC_ADVANCE_RIP();
4763 IEM_MC_END();
4764 }
4765 else
4766 {
4767 /*
4768 * Register, memory.
4769 */
4770 IEM_MC_BEGIN(0, 2);
4771 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4773
4774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4777 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4778
4779 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4780 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4781
4782 IEM_MC_ADVANCE_RIP();
4783 IEM_MC_END();
4784 }
4785 return VINF_SUCCESS;
4786}
4787
4788/* Opcode 0xf2 0x0f 0x7f - invalid */
4789
4790
4791
4792/** Opcode 0x0f 0x80. */
4793FNIEMOP_DEF(iemOp_jo_Jv)
4794{
4795 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4796 IEMOP_HLP_MIN_386();
4797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4798 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4799 {
4800 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802
4803 IEM_MC_BEGIN(0, 0);
4804 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4805 IEM_MC_REL_JMP_S16(i16Imm);
4806 } IEM_MC_ELSE() {
4807 IEM_MC_ADVANCE_RIP();
4808 } IEM_MC_ENDIF();
4809 IEM_MC_END();
4810 }
4811 else
4812 {
4813 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815
4816 IEM_MC_BEGIN(0, 0);
4817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4818 IEM_MC_REL_JMP_S32(i32Imm);
4819 } IEM_MC_ELSE() {
4820 IEM_MC_ADVANCE_RIP();
4821 } IEM_MC_ENDIF();
4822 IEM_MC_END();
4823 }
4824 return VINF_SUCCESS;
4825}
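
/*
 * All the Jcc workers in this stretch follow the pattern above: fetch a
 * signed 16- or 32-bit displacement, then either advance RIP or jump relative
 * to the end of the instruction. Simplified model of the taken path, for
 * illustration only; the real IEM_MC_REL_JMP_S16/S32 statements also perform
 * canonical and code-segment-limit checks.
 */
#if 0 /* illustrative sketch, not part of the build */
static uint64_t sketchRelJmp(uint64_t uRipNext, int32_t offDisp, unsigned cbOpSize)
{
    uint64_t const uRipNew = uRipNext + (int64_t)offDisp;        /* uRipNext = RIP past the instruction */
    return cbOpSize == 2 ? uRipNew & UINT64_C(0xffff) : uRipNew; /* 16-bit operand size truncates IP */
}
#endif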
4826
4827
4828/** Opcode 0x0f 0x81. */
4829FNIEMOP_DEF(iemOp_jno_Jv)
4830{
4831 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4832 IEMOP_HLP_MIN_386();
4833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4834 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4835 {
4836 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838
4839 IEM_MC_BEGIN(0, 0);
4840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4841 IEM_MC_ADVANCE_RIP();
4842 } IEM_MC_ELSE() {
4843 IEM_MC_REL_JMP_S16(i16Imm);
4844 } IEM_MC_ENDIF();
4845 IEM_MC_END();
4846 }
4847 else
4848 {
4849 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851
4852 IEM_MC_BEGIN(0, 0);
4853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4854 IEM_MC_ADVANCE_RIP();
4855 } IEM_MC_ELSE() {
4856 IEM_MC_REL_JMP_S32(i32Imm);
4857 } IEM_MC_ENDIF();
4858 IEM_MC_END();
4859 }
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/** Opcode 0x0f 0x82. */
4865FNIEMOP_DEF(iemOp_jc_Jv)
4866{
4867 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4868 IEMOP_HLP_MIN_386();
4869 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4870 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4871 {
4872 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4874
4875 IEM_MC_BEGIN(0, 0);
4876 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4877 IEM_MC_REL_JMP_S16(i16Imm);
4878 } IEM_MC_ELSE() {
4879 IEM_MC_ADVANCE_RIP();
4880 } IEM_MC_ENDIF();
4881 IEM_MC_END();
4882 }
4883 else
4884 {
4885 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4890 IEM_MC_REL_JMP_S32(i32Imm);
4891 } IEM_MC_ELSE() {
4892 IEM_MC_ADVANCE_RIP();
4893 } IEM_MC_ENDIF();
4894 IEM_MC_END();
4895 }
4896 return VINF_SUCCESS;
4897}
4898
4899
4900/** Opcode 0x0f 0x83. */
4901FNIEMOP_DEF(iemOp_jnc_Jv)
4902{
4903 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4904 IEMOP_HLP_MIN_386();
4905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4906 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4907 {
4908 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4910
4911 IEM_MC_BEGIN(0, 0);
4912 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4913 IEM_MC_ADVANCE_RIP();
4914 } IEM_MC_ELSE() {
4915 IEM_MC_REL_JMP_S16(i16Imm);
4916 } IEM_MC_ENDIF();
4917 IEM_MC_END();
4918 }
4919 else
4920 {
4921 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4923
4924 IEM_MC_BEGIN(0, 0);
4925 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4926 IEM_MC_ADVANCE_RIP();
4927 } IEM_MC_ELSE() {
4928 IEM_MC_REL_JMP_S32(i32Imm);
4929 } IEM_MC_ENDIF();
4930 IEM_MC_END();
4931 }
4932 return VINF_SUCCESS;
4933}
4934
4935
4936/** Opcode 0x0f 0x84. */
4937FNIEMOP_DEF(iemOp_je_Jv)
4938{
4939 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4940 IEMOP_HLP_MIN_386();
4941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4942 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4943 {
4944 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946
4947 IEM_MC_BEGIN(0, 0);
4948 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4949 IEM_MC_REL_JMP_S16(i16Imm);
4950 } IEM_MC_ELSE() {
4951 IEM_MC_ADVANCE_RIP();
4952 } IEM_MC_ENDIF();
4953 IEM_MC_END();
4954 }
4955 else
4956 {
4957 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4959
4960 IEM_MC_BEGIN(0, 0);
4961 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4962 IEM_MC_REL_JMP_S32(i32Imm);
4963 } IEM_MC_ELSE() {
4964 IEM_MC_ADVANCE_RIP();
4965 } IEM_MC_ENDIF();
4966 IEM_MC_END();
4967 }
4968 return VINF_SUCCESS;
4969}
4970
4971
4972/** Opcode 0x0f 0x85. */
4973FNIEMOP_DEF(iemOp_jne_Jv)
4974{
4975 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4976 IEMOP_HLP_MIN_386();
4977 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4978 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4979 {
4980 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982
4983 IEM_MC_BEGIN(0, 0);
4984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4985 IEM_MC_ADVANCE_RIP();
4986 } IEM_MC_ELSE() {
4987 IEM_MC_REL_JMP_S16(i16Imm);
4988 } IEM_MC_ENDIF();
4989 IEM_MC_END();
4990 }
4991 else
4992 {
4993 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995
4996 IEM_MC_BEGIN(0, 0);
4997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4998 IEM_MC_ADVANCE_RIP();
4999 } IEM_MC_ELSE() {
5000 IEM_MC_REL_JMP_S32(i32Imm);
5001 } IEM_MC_ENDIF();
5002 IEM_MC_END();
5003 }
5004 return VINF_SUCCESS;
5005}
5006
5007
5008/** Opcode 0x0f 0x86. */
5009FNIEMOP_DEF(iemOp_jbe_Jv)
5010{
5011 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5012 IEMOP_HLP_MIN_386();
5013 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5014 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5015 {
5016 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5018
5019 IEM_MC_BEGIN(0, 0);
5020 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5021 IEM_MC_REL_JMP_S16(i16Imm);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_ADVANCE_RIP();
5024 } IEM_MC_ENDIF();
5025 IEM_MC_END();
5026 }
5027 else
5028 {
5029 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5031
5032 IEM_MC_BEGIN(0, 0);
5033 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5034 IEM_MC_REL_JMP_S32(i32Imm);
5035 } IEM_MC_ELSE() {
5036 IEM_MC_ADVANCE_RIP();
5037 } IEM_MC_ENDIF();
5038 IEM_MC_END();
5039 }
5040 return VINF_SUCCESS;
5041}
5042
5043
5044/** Opcode 0x0f 0x87. */
5045FNIEMOP_DEF(iemOp_jnbe_Jv)
5046{
5047 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5048 IEMOP_HLP_MIN_386();
5049 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5050 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5051 {
5052 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5054
5055 IEM_MC_BEGIN(0, 0);
5056 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5057 IEM_MC_ADVANCE_RIP();
5058 } IEM_MC_ELSE() {
5059 IEM_MC_REL_JMP_S16(i16Imm);
5060 } IEM_MC_ENDIF();
5061 IEM_MC_END();
5062 }
5063 else
5064 {
5065 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067
5068 IEM_MC_BEGIN(0, 0);
5069 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5070 IEM_MC_ADVANCE_RIP();
5071 } IEM_MC_ELSE() {
5072 IEM_MC_REL_JMP_S32(i32Imm);
5073 } IEM_MC_ENDIF();
5074 IEM_MC_END();
5075 }
5076 return VINF_SUCCESS;
5077}
5078
5079
5080/** Opcode 0x0f 0x88. */
5081FNIEMOP_DEF(iemOp_js_Jv)
5082{
5083 IEMOP_MNEMONIC(js_Jv, "js Jv");
5084 IEMOP_HLP_MIN_386();
5085 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5086 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5087 {
5088 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5090
5091 IEM_MC_BEGIN(0, 0);
5092 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5093 IEM_MC_REL_JMP_S16(i16Imm);
5094 } IEM_MC_ELSE() {
5095 IEM_MC_ADVANCE_RIP();
5096 } IEM_MC_ENDIF();
5097 IEM_MC_END();
5098 }
5099 else
5100 {
5101 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103
5104 IEM_MC_BEGIN(0, 0);
5105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5106 IEM_MC_REL_JMP_S32(i32Imm);
5107 } IEM_MC_ELSE() {
5108 IEM_MC_ADVANCE_RIP();
5109 } IEM_MC_ENDIF();
5110 IEM_MC_END();
5111 }
5112 return VINF_SUCCESS;
5113}
5114
5115
5116/** Opcode 0x0f 0x89. */
5117FNIEMOP_DEF(iemOp_jns_Jv)
5118{
5119 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5120 IEMOP_HLP_MIN_386();
5121 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5122 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5123 {
5124 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5126
5127 IEM_MC_BEGIN(0, 0);
5128 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5129 IEM_MC_ADVANCE_RIP();
5130 } IEM_MC_ELSE() {
5131 IEM_MC_REL_JMP_S16(i16Imm);
5132 } IEM_MC_ENDIF();
5133 IEM_MC_END();
5134 }
5135 else
5136 {
5137 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139
5140 IEM_MC_BEGIN(0, 0);
5141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5142 IEM_MC_ADVANCE_RIP();
5143 } IEM_MC_ELSE() {
5144 IEM_MC_REL_JMP_S32(i32Imm);
5145 } IEM_MC_ENDIF();
5146 IEM_MC_END();
5147 }
5148 return VINF_SUCCESS;
5149}
5150
5151
5152/** Opcode 0x0f 0x8a. */
5153FNIEMOP_DEF(iemOp_jp_Jv)
5154{
5155 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5156 IEMOP_HLP_MIN_386();
5157 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5158 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5159 {
5160 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162
5163 IEM_MC_BEGIN(0, 0);
5164 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5165 IEM_MC_REL_JMP_S16(i16Imm);
5166 } IEM_MC_ELSE() {
5167 IEM_MC_ADVANCE_RIP();
5168 } IEM_MC_ENDIF();
5169 IEM_MC_END();
5170 }
5171 else
5172 {
5173 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175
5176 IEM_MC_BEGIN(0, 0);
5177 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5178 IEM_MC_REL_JMP_S32(i32Imm);
5179 } IEM_MC_ELSE() {
5180 IEM_MC_ADVANCE_RIP();
5181 } IEM_MC_ENDIF();
5182 IEM_MC_END();
5183 }
5184 return VINF_SUCCESS;
5185}
5186
5187
5188/** Opcode 0x0f 0x8b. */
5189FNIEMOP_DEF(iemOp_jnp_Jv)
5190{
5191 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5192 IEMOP_HLP_MIN_386();
5193 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5194 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5195 {
5196 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5198
5199 IEM_MC_BEGIN(0, 0);
5200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5201 IEM_MC_ADVANCE_RIP();
5202 } IEM_MC_ELSE() {
5203 IEM_MC_REL_JMP_S16(i16Imm);
5204 } IEM_MC_ENDIF();
5205 IEM_MC_END();
5206 }
5207 else
5208 {
5209 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211
5212 IEM_MC_BEGIN(0, 0);
5213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5214 IEM_MC_ADVANCE_RIP();
5215 } IEM_MC_ELSE() {
5216 IEM_MC_REL_JMP_S32(i32Imm);
5217 } IEM_MC_ENDIF();
5218 IEM_MC_END();
5219 }
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** Opcode 0x0f 0x8c. */
5225FNIEMOP_DEF(iemOp_jl_Jv)
5226{
5227 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5228 IEMOP_HLP_MIN_386();
5229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5230 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5231 {
5232 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5234
5235 IEM_MC_BEGIN(0, 0);
5236 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5237 IEM_MC_REL_JMP_S16(i16Imm);
5238 } IEM_MC_ELSE() {
5239 IEM_MC_ADVANCE_RIP();
5240 } IEM_MC_ENDIF();
5241 IEM_MC_END();
5242 }
5243 else
5244 {
5245 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5247
5248 IEM_MC_BEGIN(0, 0);
5249 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5250 IEM_MC_REL_JMP_S32(i32Imm);
5251 } IEM_MC_ELSE() {
5252 IEM_MC_ADVANCE_RIP();
5253 } IEM_MC_ENDIF();
5254 IEM_MC_END();
5255 }
5256 return VINF_SUCCESS;
5257}
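
/*
 * IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) above encodes the standard
 * signed "less" condition: taken when SF and OF disagree. The same test as a
 * plain predicate; the helper name is made up for illustration.
 */
#if 0 /* illustrative sketch, not part of the build */
static bool sketchCondLess(uint32_t fEfl)
{
    return !(fEfl & X86_EFL_SF) != !(fEfl & X86_EFL_OF); /* SF != OF */
}
#endif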
5258
5259
5260/** Opcode 0x0f 0x8d. */
5261FNIEMOP_DEF(iemOp_jnl_Jv)
5262{
5263 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5264 IEMOP_HLP_MIN_386();
5265 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5266 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5267 {
5268 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270
5271 IEM_MC_BEGIN(0, 0);
5272 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5273 IEM_MC_ADVANCE_RIP();
5274 } IEM_MC_ELSE() {
5275 IEM_MC_REL_JMP_S16(i16Imm);
5276 } IEM_MC_ENDIF();
5277 IEM_MC_END();
5278 }
5279 else
5280 {
5281 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283
5284 IEM_MC_BEGIN(0, 0);
5285 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5286 IEM_MC_ADVANCE_RIP();
5287 } IEM_MC_ELSE() {
5288 IEM_MC_REL_JMP_S32(i32Imm);
5289 } IEM_MC_ENDIF();
5290 IEM_MC_END();
5291 }
5292 return VINF_SUCCESS;
5293}
5294
5295
5296/** Opcode 0x0f 0x8e. */
5297FNIEMOP_DEF(iemOp_jle_Jv)
5298{
5299 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5300 IEMOP_HLP_MIN_386();
5301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5302 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5303 {
5304 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5306
5307 IEM_MC_BEGIN(0, 0);
5308 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5309 IEM_MC_REL_JMP_S16(i16Imm);
5310 } IEM_MC_ELSE() {
5311 IEM_MC_ADVANCE_RIP();
5312 } IEM_MC_ENDIF();
5313 IEM_MC_END();
5314 }
5315 else
5316 {
5317 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5319
5320 IEM_MC_BEGIN(0, 0);
5321 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5322 IEM_MC_REL_JMP_S32(i32Imm);
5323 } IEM_MC_ELSE() {
5324 IEM_MC_ADVANCE_RIP();
5325 } IEM_MC_ENDIF();
5326 IEM_MC_END();
5327 }
5328 return VINF_SUCCESS;
5329}
5330
5331
5332/** Opcode 0x0f 0x8f. */
5333FNIEMOP_DEF(iemOp_jnle_Jv)
5334{
5335 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5336 IEMOP_HLP_MIN_386();
5337 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5338 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5339 {
5340 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342
5343 IEM_MC_BEGIN(0, 0);
5344 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5345 IEM_MC_ADVANCE_RIP();
5346 } IEM_MC_ELSE() {
5347 IEM_MC_REL_JMP_S16(i16Imm);
5348 } IEM_MC_ENDIF();
5349 IEM_MC_END();
5350 }
5351 else
5352 {
5353 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355
5356 IEM_MC_BEGIN(0, 0);
5357 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5358 IEM_MC_ADVANCE_RIP();
5359 } IEM_MC_ELSE() {
5360 IEM_MC_REL_JMP_S32(i32Imm);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_END();
5363 }
5364 return VINF_SUCCESS;
5365}
5366
5367
5368/** Opcode 0x0f 0x90. */
5369FNIEMOP_DEF(iemOp_seto_Eb)
5370{
5371 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5372 IEMOP_HLP_MIN_386();
5373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5374
5375 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5376 * any way. AMD says it's "unused", whatever that means. We're
5377 * ignoring it for now. */
5378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5379 {
5380 /* register target */
5381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5382 IEM_MC_BEGIN(0, 0);
5383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5384 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5385 } IEM_MC_ELSE() {
5386 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5387 } IEM_MC_ENDIF();
5388 IEM_MC_ADVANCE_RIP();
5389 IEM_MC_END();
5390 }
5391 else
5392 {
5393 /* memory target */
5394 IEM_MC_BEGIN(0, 1);
5395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5399 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5400 } IEM_MC_ELSE() {
5401 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5402 } IEM_MC_ENDIF();
5403 IEM_MC_ADVANCE_RIP();
5404 IEM_MC_END();
5405 }
5406 return VINF_SUCCESS;
5407}
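
/*
 * Every SETcc worker below shares one shape: evaluate the condition, then
 * store 1 or 0 to the byte-sized register or memory operand. Reduced to a
 * one-liner; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch, not part of the build */
static uint8_t sketchSetcc(bool fCondition)
{
    return fCondition ? 1 : 0; /* the ModRM 'reg' field plays no part in the result */
}
#endif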
5408
5409
5410/** Opcode 0x0f 0x91. */
5411FNIEMOP_DEF(iemOp_setno_Eb)
5412{
5413 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5414 IEMOP_HLP_MIN_386();
5415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5416
5417 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5418 * any way. AMD says it's "unused", whatever that means. We're
5419 * ignoring it for now. */
5420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5421 {
5422 /* register target */
5423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5424 IEM_MC_BEGIN(0, 0);
5425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5426 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5427 } IEM_MC_ELSE() {
5428 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5429 } IEM_MC_ENDIF();
5430 IEM_MC_ADVANCE_RIP();
5431 IEM_MC_END();
5432 }
5433 else
5434 {
5435 /* memory target */
5436 IEM_MC_BEGIN(0, 1);
5437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5441 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5442 } IEM_MC_ELSE() {
5443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5444 } IEM_MC_ENDIF();
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 }
5448 return VINF_SUCCESS;
5449}
5450
5451
5452/** Opcode 0x0f 0x92. */
5453FNIEMOP_DEF(iemOp_setc_Eb)
5454{
5455 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5456 IEMOP_HLP_MIN_386();
5457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5458
5459 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5460 * any way. AMD says it's "unused", whatever that means. We're
5461 * ignoring it for now. */
5462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5463 {
5464 /* register target */
5465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5466 IEM_MC_BEGIN(0, 0);
5467 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5469 } IEM_MC_ELSE() {
5470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5471 } IEM_MC_ENDIF();
5472 IEM_MC_ADVANCE_RIP();
5473 IEM_MC_END();
5474 }
5475 else
5476 {
5477 /* memory target */
5478 IEM_MC_BEGIN(0, 1);
5479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5483 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5484 } IEM_MC_ELSE() {
5485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5486 } IEM_MC_ENDIF();
5487 IEM_MC_ADVANCE_RIP();
5488 IEM_MC_END();
5489 }
5490 return VINF_SUCCESS;
5491}
5492
5493
5494/** Opcode 0x0f 0x93. */
5495FNIEMOP_DEF(iemOp_setnc_Eb)
5496{
5497 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5498 IEMOP_HLP_MIN_386();
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500
5501 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5502 * any way. AMD says it's "unused", whatever that means. We're
5503 * ignoring it for now. */
5504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5505 {
5506 /* register target */
5507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5508 IEM_MC_BEGIN(0, 0);
5509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5510 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5511 } IEM_MC_ELSE() {
5512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5513 } IEM_MC_ENDIF();
5514 IEM_MC_ADVANCE_RIP();
5515 IEM_MC_END();
5516 }
5517 else
5518 {
5519 /* memory target */
5520 IEM_MC_BEGIN(0, 1);
5521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5525 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5526 } IEM_MC_ELSE() {
5527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 }
5532 return VINF_SUCCESS;
5533}
5534
5535
5536/** Opcode 0x0f 0x94. */
5537FNIEMOP_DEF(iemOp_sete_Eb)
5538{
5539 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5540 IEMOP_HLP_MIN_386();
5541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5542
5543 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5544 * any way. AMD says it's "unused", whatever that means. We're
5545 * ignoring for now. */
5546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5547 {
5548 /* register target */
5549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5550 IEM_MC_BEGIN(0, 0);
5551 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5552 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5553 } IEM_MC_ELSE() {
5554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5555 } IEM_MC_ENDIF();
5556 IEM_MC_ADVANCE_RIP();
5557 IEM_MC_END();
5558 }
5559 else
5560 {
5561 /* memory target */
5562 IEM_MC_BEGIN(0, 1);
5563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5567 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5568 } IEM_MC_ELSE() {
5569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5570 } IEM_MC_ENDIF();
5571 IEM_MC_ADVANCE_RIP();
5572 IEM_MC_END();
5573 }
5574 return VINF_SUCCESS;
5575}
5576
5577
5578/** Opcode 0x0f 0x95. */
5579FNIEMOP_DEF(iemOp_setne_Eb)
5580{
5581 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5582 IEMOP_HLP_MIN_386();
5583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5584
5585 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5586 * any way. AMD says it's "unused", whatever that means. We're
5587 * ignoring for now. */
5588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5589 {
5590 /* register target */
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 IEM_MC_BEGIN(0, 0);
5593 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5594 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5595 } IEM_MC_ELSE() {
5596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5597 } IEM_MC_ENDIF();
5598 IEM_MC_ADVANCE_RIP();
5599 IEM_MC_END();
5600 }
5601 else
5602 {
5603 /* memory target */
5604 IEM_MC_BEGIN(0, 1);
5605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5609 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5610 } IEM_MC_ELSE() {
5611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5612 } IEM_MC_ENDIF();
5613 IEM_MC_ADVANCE_RIP();
5614 IEM_MC_END();
5615 }
5616 return VINF_SUCCESS;
5617}
5618
5619
5620/** Opcode 0x0f 0x96. */
5621FNIEMOP_DEF(iemOp_setbe_Eb)
5622{
5623 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5624 IEMOP_HLP_MIN_386();
5625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5626
5627 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5628 * any way. AMD says it's "unused", whatever that means. We're
5629 * ignoring for now. */
5630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5631 {
5632 /* register target */
5633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5634 IEM_MC_BEGIN(0, 0);
5635 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5636 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5637 } IEM_MC_ELSE() {
5638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5639 } IEM_MC_ENDIF();
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 }
5643 else
5644 {
5645 /* memory target */
5646 IEM_MC_BEGIN(0, 1);
5647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5650 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5651 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5652 } IEM_MC_ELSE() {
5653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5654 } IEM_MC_ENDIF();
5655 IEM_MC_ADVANCE_RIP();
5656 IEM_MC_END();
5657 }
5658 return VINF_SUCCESS;
5659}
5660
5661
5662/** Opcode 0x0f 0x97. */
5663FNIEMOP_DEF(iemOp_setnbe_Eb)
5664{
5665 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5666 IEMOP_HLP_MIN_386();
5667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5668
5669 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5670 * any way. AMD says it's "unused", whatever that means. We're
5671 * ignoring for now. */
5672 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5673 {
5674 /* register target */
5675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5676 IEM_MC_BEGIN(0, 0);
5677 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5678 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5679 } IEM_MC_ELSE() {
5680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5681 } IEM_MC_ENDIF();
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 }
5685 else
5686 {
5687 /* memory target */
5688 IEM_MC_BEGIN(0, 1);
5689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5693 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5694 } IEM_MC_ELSE() {
5695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5696 } IEM_MC_ENDIF();
5697 IEM_MC_ADVANCE_RIP();
5698 IEM_MC_END();
5699 }
5700 return VINF_SUCCESS;
5701}
5702
5703
5704/** Opcode 0x0f 0x98. */
5705FNIEMOP_DEF(iemOp_sets_Eb)
5706{
5707 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5708 IEMOP_HLP_MIN_386();
5709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5710
5711 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5712 * any way. AMD says it's "unused", whatever that means. We're
5713 * ignoring for now. */
5714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5715 {
5716 /* register target */
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_BEGIN(0, 0);
5719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5720 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5721 } IEM_MC_ELSE() {
5722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5723 } IEM_MC_ENDIF();
5724 IEM_MC_ADVANCE_RIP();
5725 IEM_MC_END();
5726 }
5727 else
5728 {
5729 /* memory target */
5730 IEM_MC_BEGIN(0, 1);
5731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5735 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5736 } IEM_MC_ELSE() {
5737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5738 } IEM_MC_ENDIF();
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 }
5742 return VINF_SUCCESS;
5743}
5744
5745
5746/** Opcode 0x0f 0x99. */
5747FNIEMOP_DEF(iemOp_setns_Eb)
5748{
5749 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5750 IEMOP_HLP_MIN_386();
5751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5752
5753 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5754 * any way. AMD says it's "unused", whatever that means. We're
5755 * ignoring for now. */
5756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5757 {
5758 /* register target */
5759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5760 IEM_MC_BEGIN(0, 0);
5761 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5762 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5763 } IEM_MC_ELSE() {
5764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5765 } IEM_MC_ENDIF();
5766 IEM_MC_ADVANCE_RIP();
5767 IEM_MC_END();
5768 }
5769 else
5770 {
5771 /* memory target */
5772 IEM_MC_BEGIN(0, 1);
5773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5777 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5778 } IEM_MC_ELSE() {
5779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5780 } IEM_MC_ENDIF();
5781 IEM_MC_ADVANCE_RIP();
5782 IEM_MC_END();
5783 }
5784 return VINF_SUCCESS;
5785}
5786
5787
5788/** Opcode 0x0f 0x9a. */
5789FNIEMOP_DEF(iemOp_setp_Eb)
5790{
5791 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5792 IEMOP_HLP_MIN_386();
5793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5794
5795 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5796 * any way. AMD says it's "unused", whatever that means. We're
5797 * ignoring for now. */
5798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5799 {
5800 /* register target */
5801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5802 IEM_MC_BEGIN(0, 0);
5803 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5804 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5805 } IEM_MC_ELSE() {
5806 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5807 } IEM_MC_ENDIF();
5808 IEM_MC_ADVANCE_RIP();
5809 IEM_MC_END();
5810 }
5811 else
5812 {
5813 /* memory target */
5814 IEM_MC_BEGIN(0, 1);
5815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5819 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5820 } IEM_MC_ELSE() {
5821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5822 } IEM_MC_ENDIF();
5823 IEM_MC_ADVANCE_RIP();
5824 IEM_MC_END();
5825 }
5826 return VINF_SUCCESS;
5827}
5828
5829
5830/** Opcode 0x0f 0x9b. */
5831FNIEMOP_DEF(iemOp_setnp_Eb)
5832{
5833 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5834 IEMOP_HLP_MIN_386();
5835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5836
5837 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5838 * any way. AMD says it's "unused", whatever that means. We're
5839 * ignoring for now. */
5840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5841 {
5842 /* register target */
5843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5844 IEM_MC_BEGIN(0, 0);
5845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5846 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5847 } IEM_MC_ELSE() {
5848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5849 } IEM_MC_ENDIF();
5850 IEM_MC_ADVANCE_RIP();
5851 IEM_MC_END();
5852 }
5853 else
5854 {
5855 /* memory target */
5856 IEM_MC_BEGIN(0, 1);
5857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5861 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5862 } IEM_MC_ELSE() {
5863 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5864 } IEM_MC_ENDIF();
5865 IEM_MC_ADVANCE_RIP();
5866 IEM_MC_END();
5867 }
5868 return VINF_SUCCESS;
5869}
5870
5871
5872/** Opcode 0x0f 0x9c. */
5873FNIEMOP_DEF(iemOp_setl_Eb)
5874{
5875 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5876 IEMOP_HLP_MIN_386();
5877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5878
5879 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5880 * any way. AMD says it's "unused", whatever that means. We're
5881 * ignoring for now. */
5882 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5883 {
5884 /* register target */
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_BEGIN(0, 0);
5887 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5888 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5889 } IEM_MC_ELSE() {
5890 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5891 } IEM_MC_ENDIF();
5892 IEM_MC_ADVANCE_RIP();
5893 IEM_MC_END();
5894 }
5895 else
5896 {
5897 /* memory target */
5898 IEM_MC_BEGIN(0, 1);
5899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5903 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5904 } IEM_MC_ELSE() {
5905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5906 } IEM_MC_ENDIF();
5907 IEM_MC_ADVANCE_RIP();
5908 IEM_MC_END();
5909 }
5910 return VINF_SUCCESS;
5911}
5912
5913
5914/** Opcode 0x0f 0x9d. */
5915FNIEMOP_DEF(iemOp_setnl_Eb)
5916{
5917 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5918 IEMOP_HLP_MIN_386();
5919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5920
5921 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5922 * any way. AMD says it's "unused", whatever that means. We're
5923 * ignoring for now. */
5924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5925 {
5926 /* register target */
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5928 IEM_MC_BEGIN(0, 0);
5929 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5930 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5931 } IEM_MC_ELSE() {
5932 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5933 } IEM_MC_ENDIF();
5934 IEM_MC_ADVANCE_RIP();
5935 IEM_MC_END();
5936 }
5937 else
5938 {
5939 /* memory target */
5940 IEM_MC_BEGIN(0, 1);
5941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5944 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5945 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5946 } IEM_MC_ELSE() {
5947 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5948 } IEM_MC_ENDIF();
5949 IEM_MC_ADVANCE_RIP();
5950 IEM_MC_END();
5951 }
5952 return VINF_SUCCESS;
5953}
5954
5955
5956/** Opcode 0x0f 0x9e. */
5957FNIEMOP_DEF(iemOp_setle_Eb)
5958{
5959 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5960 IEMOP_HLP_MIN_386();
5961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5962
5963 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5964 * any way. AMD says it's "unused", whatever that means. We're
5965 * ignoring for now. */
5966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5967 {
5968 /* register target */
5969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5970 IEM_MC_BEGIN(0, 0);
5971 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5972 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5973 } IEM_MC_ELSE() {
5974 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5975 } IEM_MC_ENDIF();
5976 IEM_MC_ADVANCE_RIP();
5977 IEM_MC_END();
5978 }
5979 else
5980 {
5981 /* memory target */
5982 IEM_MC_BEGIN(0, 1);
5983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5986 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5987 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5988 } IEM_MC_ELSE() {
5989 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5990 } IEM_MC_ENDIF();
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 }
5994 return VINF_SUCCESS;
5995}
5996
5997
5998/** Opcode 0x0f 0x9f. */
5999FNIEMOP_DEF(iemOp_setnle_Eb)
6000{
6001 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6002 IEMOP_HLP_MIN_386();
6003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6004
6005 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6006 * any way. AMD says it's "unused", whatever that means. We're
6007 * ignoring for now. */
6008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6009 {
6010 /* register target */
6011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6012 IEM_MC_BEGIN(0, 0);
6013 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6014 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6015 } IEM_MC_ELSE() {
6016 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6017 } IEM_MC_ENDIF();
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 }
6021 else
6022 {
6023 /* memory target */
6024 IEM_MC_BEGIN(0, 1);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6029 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6030 } IEM_MC_ELSE() {
6031 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6032 } IEM_MC_ENDIF();
6033 IEM_MC_ADVANCE_RIP();
6034 IEM_MC_END();
6035 }
6036 return VINF_SUCCESS;
6037}
6038
6039
6040/**
6041 * Common 'push segment-register' helper.
6042 */
6043FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6044{
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
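    /* In 64-bit mode only the FS and GS pushes (0F A0 / 0F A8) are valid
     * encodings; the one-byte ES/CS/SS/DS pushes are invalid opcodes there
     * and are expected never to reach this helper. */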
6047 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6048
6049 switch (pVCpu->iem.s.enmEffOpSize)
6050 {
6051 case IEMMODE_16BIT:
6052 IEM_MC_BEGIN(0, 1);
6053 IEM_MC_LOCAL(uint16_t, u16Value);
6054 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6055 IEM_MC_PUSH_U16(u16Value);
6056 IEM_MC_ADVANCE_RIP();
6057 IEM_MC_END();
6058 break;
6059
6060 case IEMMODE_32BIT:
6061 IEM_MC_BEGIN(0, 1);
6062 IEM_MC_LOCAL(uint32_t, u32Value);
6063 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
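            /* Note: a segment register push with a 32-bit operand size does
             * not reliably write all 32 bits; on at least some CPUs only the
             * low 16 bits are stored and the upper half of the stack slot is
             * left untouched.  IEM_MC_PUSH_U32_SREG (rather than the plain
             * IEM_MC_PUSH_U32) exists to model that behaviour. */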
6064 IEM_MC_PUSH_U32_SREG(u32Value);
6065 IEM_MC_ADVANCE_RIP();
6066 IEM_MC_END();
6067 break;
6068
6069 case IEMMODE_64BIT:
6070 IEM_MC_BEGIN(0, 1);
6071 IEM_MC_LOCAL(uint64_t, u64Value);
6072 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6073 IEM_MC_PUSH_U64(u64Value);
6074 IEM_MC_ADVANCE_RIP();
6075 IEM_MC_END();
6076 break;
6077 }
6078
6079 return VINF_SUCCESS;
6080}
6081
6082
6083/** Opcode 0x0f 0xa0. */
6084FNIEMOP_DEF(iemOp_push_fs)
6085{
6086 IEMOP_MNEMONIC(push_fs, "push fs");
6087 IEMOP_HLP_MIN_386();
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6090}
6091
6092
6093/** Opcode 0x0f 0xa1. */
6094FNIEMOP_DEF(iemOp_pop_fs)
6095{
6096 IEMOP_MNEMONIC(pop_fs, "pop fs");
6097 IEMOP_HLP_MIN_386();
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6100}
6101
6102
6103/** Opcode 0x0f 0xa2. */
6104FNIEMOP_DEF(iemOp_cpuid)
6105{
6106 IEMOP_MNEMONIC(cpuid, "cpuid");
6107    IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486s; it appeared in later steppings. */
6108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6109 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6110}
6111
6112
6113/**
6114 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6115 * iemOp_bts_Ev_Gv.
6116 */
6117FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6118{
6119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6120 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6121
6122 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6123 {
6124 /* register destination. */
6125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6126 switch (pVCpu->iem.s.enmEffOpSize)
6127 {
6128 case IEMMODE_16BIT:
6129 IEM_MC_BEGIN(3, 0);
6130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6131 IEM_MC_ARG(uint16_t, u16Src, 1);
6132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6133
6134 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6135 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6136 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6137 IEM_MC_REF_EFLAGS(pEFlags);
6138 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6139
6140 IEM_MC_ADVANCE_RIP();
6141 IEM_MC_END();
6142 return VINF_SUCCESS;
6143
6144 case IEMMODE_32BIT:
6145 IEM_MC_BEGIN(3, 0);
6146 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6147 IEM_MC_ARG(uint32_t, u32Src, 1);
6148 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6149
6150 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6151 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6152 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6153 IEM_MC_REF_EFLAGS(pEFlags);
6154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6155
6156 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6157 IEM_MC_ADVANCE_RIP();
6158 IEM_MC_END();
6159 return VINF_SUCCESS;
6160
6161 case IEMMODE_64BIT:
6162 IEM_MC_BEGIN(3, 0);
6163 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6164 IEM_MC_ARG(uint64_t, u64Src, 1);
6165 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6166
6167 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6168 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6169 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6170 IEM_MC_REF_EFLAGS(pEFlags);
6171 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6172
6173 IEM_MC_ADVANCE_RIP();
6174 IEM_MC_END();
6175 return VINF_SUCCESS;
6176
6177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6178 }
6179 }
6180 else
6181 {
6182 /* memory destination. */
6183
6184 uint32_t fAccess;
6185 if (pImpl->pfnLockedU16)
6186 fAccess = IEM_ACCESS_DATA_RW;
6187 else /* BT */
6188 fAccess = IEM_ACCESS_DATA_R;
6189
6190 /** @todo test negative bit offsets! */
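        /* The memory forms treat the bit offset in Gv as a signed value that
         * may select a bit outside the operand at GCPtrEffDst.  The SAR/SHL
         * pairs below convert it into a byte displacement of
         * (bitoffset >> log2(operand bits)) operand-sized units, while the
         * retained low bits pick the bit inside that unit.  For the 16-bit
         * form this amounts to:
         *
         *      GCPtrEffDst += ((int16_t)u16Src >> 4) * sizeof(uint16_t);
         *      bit          = u16Src & 15;
         *
         * So 'bt word [rax], bx' with BX=0x13 tests bit 3 of the word at
         * rax+2, and a negative offset in BX reaches below GCPtrEffDst. */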
6191 switch (pVCpu->iem.s.enmEffOpSize)
6192 {
6193 case IEMMODE_16BIT:
6194 IEM_MC_BEGIN(3, 2);
6195 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6196 IEM_MC_ARG(uint16_t, u16Src, 1);
6197 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6199 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6200
6201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6202 if (pImpl->pfnLockedU16)
6203 IEMOP_HLP_DONE_DECODING();
6204 else
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6207 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6208 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6209 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6210 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6211 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6212 IEM_MC_FETCH_EFLAGS(EFlags);
6213
6214 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6217 else
6218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6219 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6220
6221 IEM_MC_COMMIT_EFLAGS(EFlags);
6222 IEM_MC_ADVANCE_RIP();
6223 IEM_MC_END();
6224 return VINF_SUCCESS;
6225
6226 case IEMMODE_32BIT:
6227 IEM_MC_BEGIN(3, 2);
6228 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6229 IEM_MC_ARG(uint32_t, u32Src, 1);
6230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6232 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6233
6234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6235 if (pImpl->pfnLockedU16)
6236 IEMOP_HLP_DONE_DECODING();
6237 else
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6240 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6241 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6242 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6243 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6244 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6245 IEM_MC_FETCH_EFLAGS(EFlags);
6246
6247 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6249 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6250 else
6251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6252 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6253
6254 IEM_MC_COMMIT_EFLAGS(EFlags);
6255 IEM_MC_ADVANCE_RIP();
6256 IEM_MC_END();
6257 return VINF_SUCCESS;
6258
6259 case IEMMODE_64BIT:
6260 IEM_MC_BEGIN(3, 2);
6261 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6262 IEM_MC_ARG(uint64_t, u64Src, 1);
6263 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6265 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6266
6267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6268 if (pImpl->pfnLockedU16)
6269 IEMOP_HLP_DONE_DECODING();
6270 else
6271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6272 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6273 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6274 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6275 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6276 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6277 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6278 IEM_MC_FETCH_EFLAGS(EFlags);
6279
6280 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6283 else
6284 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6285 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6286
6287 IEM_MC_COMMIT_EFLAGS(EFlags);
6288 IEM_MC_ADVANCE_RIP();
6289 IEM_MC_END();
6290 return VINF_SUCCESS;
6291
6292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6293 }
6294 }
6295}
6296
6297
6298/** Opcode 0x0f 0xa3. */
6299FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6300{
6301 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6302 IEMOP_HLP_MIN_386();
6303 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6304}
6305
6306
6307/**
6308 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6309 */
6310FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6311{
6312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6313 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6314
6315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6316 {
6317 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319
6320 switch (pVCpu->iem.s.enmEffOpSize)
6321 {
6322 case IEMMODE_16BIT:
6323 IEM_MC_BEGIN(4, 0);
6324 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6325 IEM_MC_ARG(uint16_t, u16Src, 1);
6326 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6327 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6328
6329 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6330 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6331 IEM_MC_REF_EFLAGS(pEFlags);
6332 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6333
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 case IEMMODE_32BIT:
6339 IEM_MC_BEGIN(4, 0);
6340 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6341 IEM_MC_ARG(uint32_t, u32Src, 1);
6342 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6343 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6344
6345 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6346 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6347 IEM_MC_REF_EFLAGS(pEFlags);
6348 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6349
6350 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6351 IEM_MC_ADVANCE_RIP();
6352 IEM_MC_END();
6353 return VINF_SUCCESS;
6354
6355 case IEMMODE_64BIT:
6356 IEM_MC_BEGIN(4, 0);
6357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6358 IEM_MC_ARG(uint64_t, u64Src, 1);
6359 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6360 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6361
6362 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6363 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6364 IEM_MC_REF_EFLAGS(pEFlags);
6365 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6366
6367 IEM_MC_ADVANCE_RIP();
6368 IEM_MC_END();
6369 return VINF_SUCCESS;
6370
6371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6372 }
6373 }
6374 else
6375 {
6376 switch (pVCpu->iem.s.enmEffOpSize)
6377 {
6378 case IEMMODE_16BIT:
6379 IEM_MC_BEGIN(4, 2);
6380 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6381 IEM_MC_ARG(uint16_t, u16Src, 1);
6382 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6383 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6385
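                /* The trailing '1' given to IEM_MC_CALC_RM_EFF_ADDR is the
                 * number of immediate bytes still to be fetched (the shift
                 * count); this matters mainly for RIP-relative addressing,
                 * which is relative to the end of the whole instruction.
                 * The CL variants further down pass 0 as they carry no
                 * immediate. */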
6386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6387 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6388 IEM_MC_ASSIGN(cShiftArg, cShift);
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6391 IEM_MC_FETCH_EFLAGS(EFlags);
6392 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6393 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6394
6395 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6396 IEM_MC_COMMIT_EFLAGS(EFlags);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(4, 2);
6403 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6404 IEM_MC_ARG(uint32_t, u32Src, 1);
6405 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6406 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6408
6409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6410 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6411 IEM_MC_ASSIGN(cShiftArg, cShift);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6414 IEM_MC_FETCH_EFLAGS(EFlags);
6415 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6416 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6417
6418 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6419 IEM_MC_COMMIT_EFLAGS(EFlags);
6420 IEM_MC_ADVANCE_RIP();
6421 IEM_MC_END();
6422 return VINF_SUCCESS;
6423
6424 case IEMMODE_64BIT:
6425 IEM_MC_BEGIN(4, 2);
6426 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6427 IEM_MC_ARG(uint64_t, u64Src, 1);
6428 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6429 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6431
6432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6433 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6434 IEM_MC_ASSIGN(cShiftArg, cShift);
6435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6437 IEM_MC_FETCH_EFLAGS(EFlags);
6438 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6439 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6440
6441 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6442 IEM_MC_COMMIT_EFLAGS(EFlags);
6443 IEM_MC_ADVANCE_RIP();
6444 IEM_MC_END();
6445 return VINF_SUCCESS;
6446
6447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6448 }
6449 }
6450}
6451
6452
6453/**
6454 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6455 */
6456FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6457{
6458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6459 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6460
6461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6462 {
6463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6464
6465 switch (pVCpu->iem.s.enmEffOpSize)
6466 {
6467 case IEMMODE_16BIT:
6468 IEM_MC_BEGIN(4, 0);
6469 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6470 IEM_MC_ARG(uint16_t, u16Src, 1);
6471 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6472 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6473
6474 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6475 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6476 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6477 IEM_MC_REF_EFLAGS(pEFlags);
6478 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6479
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case IEMMODE_32BIT:
6485 IEM_MC_BEGIN(4, 0);
6486 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6487 IEM_MC_ARG(uint32_t, u32Src, 1);
6488 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6489 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6490
6491 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6492 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6493 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6494 IEM_MC_REF_EFLAGS(pEFlags);
6495 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6496
6497 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6498 IEM_MC_ADVANCE_RIP();
6499 IEM_MC_END();
6500 return VINF_SUCCESS;
6501
6502 case IEMMODE_64BIT:
6503 IEM_MC_BEGIN(4, 0);
6504 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6505 IEM_MC_ARG(uint64_t, u64Src, 1);
6506 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6507 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6508
6509 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6510 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6511 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6512 IEM_MC_REF_EFLAGS(pEFlags);
6513 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6514
6515 IEM_MC_ADVANCE_RIP();
6516 IEM_MC_END();
6517 return VINF_SUCCESS;
6518
6519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6520 }
6521 }
6522 else
6523 {
6524 switch (pVCpu->iem.s.enmEffOpSize)
6525 {
6526 case IEMMODE_16BIT:
6527 IEM_MC_BEGIN(4, 2);
6528 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6529 IEM_MC_ARG(uint16_t, u16Src, 1);
6530 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6531 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6533
6534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6536 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6537 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6538 IEM_MC_FETCH_EFLAGS(EFlags);
6539 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6540 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6541
6542 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6543 IEM_MC_COMMIT_EFLAGS(EFlags);
6544 IEM_MC_ADVANCE_RIP();
6545 IEM_MC_END();
6546 return VINF_SUCCESS;
6547
6548 case IEMMODE_32BIT:
6549 IEM_MC_BEGIN(4, 2);
6550 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6551 IEM_MC_ARG(uint32_t, u32Src, 1);
6552 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6553 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6555
6556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6559 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6560 IEM_MC_FETCH_EFLAGS(EFlags);
6561 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6562 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6563
6564 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6565 IEM_MC_COMMIT_EFLAGS(EFlags);
6566 IEM_MC_ADVANCE_RIP();
6567 IEM_MC_END();
6568 return VINF_SUCCESS;
6569
6570 case IEMMODE_64BIT:
6571 IEM_MC_BEGIN(4, 2);
6572 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6573 IEM_MC_ARG(uint64_t, u64Src, 1);
6574 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6575 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6577
6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6581 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6582 IEM_MC_FETCH_EFLAGS(EFlags);
6583 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6584 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6585
6586 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6587 IEM_MC_COMMIT_EFLAGS(EFlags);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6593 }
6594 }
6595}
6596
6597
6598
6599/** Opcode 0x0f 0xa4. */
6600FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6601{
6602 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6603 IEMOP_HLP_MIN_386();
6604 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6605}
6606
6607
6608/** Opcode 0x0f 0xa5. */
6609FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6610{
6611 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6612 IEMOP_HLP_MIN_386();
6613 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6614}
6615
6616
6617/** Opcode 0x0f 0xa8. */
6618FNIEMOP_DEF(iemOp_push_gs)
6619{
6620 IEMOP_MNEMONIC(push_gs, "push gs");
6621 IEMOP_HLP_MIN_386();
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6624}
6625
6626
6627/** Opcode 0x0f 0xa9. */
6628FNIEMOP_DEF(iemOp_pop_gs)
6629{
6630 IEMOP_MNEMONIC(pop_gs, "pop gs");
6631 IEMOP_HLP_MIN_386();
6632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6633 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6634}
6635
6636
6637/** Opcode 0x0f 0xaa. */
6638FNIEMOP_DEF(iemOp_rsm)
6639{
6640 IEMOP_MNEMONIC(rsm, "rsm");
6641 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6642 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6643 * intercept). */
6644 IEMOP_BITCH_ABOUT_STUB();
6645 return IEMOP_RAISE_INVALID_OPCODE();
6646}
6647
6648//IEMOP_HLP_MIN_386();
6649
6650
6651/** Opcode 0x0f 0xab. */
6652FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6653{
6654 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6655 IEMOP_HLP_MIN_386();
6656 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6657}
6658
6659
6660/** Opcode 0x0f 0xac. */
6661FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6662{
6663 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6664 IEMOP_HLP_MIN_386();
6665 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6666}
6667
6668
6669/** Opcode 0x0f 0xad. */
6670FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6671{
6672 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6673 IEMOP_HLP_MIN_386();
6674 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6675}
6676
6677
6678/** Opcode 0x0f 0xae mem/0. */
6679FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6680{
6681 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6682 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6683 return IEMOP_RAISE_INVALID_OPCODE();
6684
6685 IEM_MC_BEGIN(3, 1);
6686 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6687 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6688 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6691 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6692 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6693 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6694 IEM_MC_END();
6695 return VINF_SUCCESS;
6696}
6697
6698
6699/** Opcode 0x0f 0xae mem/1. */
6700FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6701{
6702 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6703 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6704 return IEMOP_RAISE_INVALID_OPCODE();
6705
6706 IEM_MC_BEGIN(3, 1);
6707 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6708 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6709 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6713 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6714 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6715 IEM_MC_END();
6716 return VINF_SUCCESS;
6717}
6718
6719
6720/**
6721 * @opmaps grp15
6722 * @opcode !11/2
6723 * @oppfx none
6724 * @opcpuid sse
6725 * @opgroup og_sse_mxcsrsm
6726 * @opxcpttype 5
6727 * @optest op1=0 -> mxcsr=0
6728 * @optest op1=0x2083 -> mxcsr=0x2083
6729 * @optest op1=0xfffffffe -> value.xcpt=0xd
6730 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6731 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6732 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6733 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6734 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6735 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6736 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6737 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6738 */
6739FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6740{
6741 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6742 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6743 return IEMOP_RAISE_INVALID_OPCODE();
6744
6745 IEM_MC_BEGIN(2, 0);
6746 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6747 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6750    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6751 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6752 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6753 IEM_MC_END();
6754 return VINF_SUCCESS;
6755}
6756
6757
6758/**
6759 * @opmaps grp15
6760 * @opcode !11/3
6761 * @oppfx none
6762 * @opcpuid sse
6763 * @opgroup og_sse_mxcsrsm
6764 * @opxcpttype 5
6765 * @optest mxcsr=0 -> op1=0
6766 * @optest mxcsr=0x2083 -> op1=0x2083
6767 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6768 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6769 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6770 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6771 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6772 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6773 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6774 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6775 */
6776FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6777{
6778 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6780 return IEMOP_RAISE_INVALID_OPCODE();
6781
6782 IEM_MC_BEGIN(2, 0);
6783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6784 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6788 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6789 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6790 IEM_MC_END();
6791 return VINF_SUCCESS;
6792}
6793
6794
6795/**
6796 * @opmaps grp15
6797 * @opcode !11/4
6798 * @oppfx none
6799 * @opcpuid xsave
6800 * @opgroup og_system
6801 * @opxcpttype none
6802 */
6803FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6804{
6805 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6806 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6807 return IEMOP_RAISE_INVALID_OPCODE();
6808
6809 IEM_MC_BEGIN(3, 0);
6810 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6812 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6815 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6816 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6817 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6818 IEM_MC_END();
6819 return VINF_SUCCESS;
6820}
6821
6822
6823/**
6824 * @opmaps grp15
6825 * @opcode !11/5
6826 * @oppfx none
6827 * @opcpuid xsave
6828 * @opgroup og_system
6829 * @opxcpttype none
6830 */
6831FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6832{
6833 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6834 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6835 return IEMOP_RAISE_INVALID_OPCODE();
6836
6837 IEM_MC_BEGIN(3, 0);
6838 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6839 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6840 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6843    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6844 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6845 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6846 IEM_MC_END();
6847 return VINF_SUCCESS;
6848}
6849
6850/** Opcode 0x0f 0xae mem/6. */
6851FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6852
6853/**
6854 * @opmaps grp15
6855 * @opcode !11/7
6856 * @oppfx none
6857 * @opcpuid clfsh
6858 * @opgroup og_cachectl
6859 * @optest op1=1 ->
6860 */
6861FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6862{
6863 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6864 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6865 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6866
6867 IEM_MC_BEGIN(2, 0);
6868 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6869 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6872 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6873 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6874 IEM_MC_END();
6875 return VINF_SUCCESS;
6876}
6877
6878/**
6879 * @opmaps grp15
6880 * @opcode !11/7
6881 * @oppfx 0x66
6882 * @opcpuid clflushopt
6883 * @opgroup og_cachectl
6884 * @optest op1=1 ->
6885 */
6886FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6887{
6888 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6889 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6890 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6891
6892 IEM_MC_BEGIN(2, 0);
6893 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6894 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6897 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6898 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6899 IEM_MC_END();
6900 return VINF_SUCCESS;
6901}
6902
6903
6904/** Opcode 0x0f 0xae 11b/5. */
6905FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6906{
6907 RT_NOREF_PV(bRm);
6908 IEMOP_MNEMONIC(lfence, "lfence");
6909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6911 return IEMOP_RAISE_INVALID_OPCODE();
6912
6913 IEM_MC_BEGIN(0, 0);
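    /* When the host CPU has SSE2 we can execute a real LFENCE on the guest's
     * behalf; otherwise we fall back on iemAImpl_alt_mem_fence, an
     * alternative fence (presumably a locked memory operation) that orders
     * more strongly than required but works on any host.  The same pattern
     * repeats for mfence and sfence below. */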
6914 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6915 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6916 else
6917 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6918 IEM_MC_ADVANCE_RIP();
6919 IEM_MC_END();
6920 return VINF_SUCCESS;
6921}
6922
6923
6924/** Opcode 0x0f 0xae 11b/6. */
6925FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6926{
6927 RT_NOREF_PV(bRm);
6928 IEMOP_MNEMONIC(mfence, "mfence");
6929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6931 return IEMOP_RAISE_INVALID_OPCODE();
6932
6933 IEM_MC_BEGIN(0, 0);
6934 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6935 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6936 else
6937 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6938 IEM_MC_ADVANCE_RIP();
6939 IEM_MC_END();
6940 return VINF_SUCCESS;
6941}
6942
6943
6944/** Opcode 0x0f 0xae 11b/7. */
6945FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6946{
6947 RT_NOREF_PV(bRm);
6948 IEMOP_MNEMONIC(sfence, "sfence");
6949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6950 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6951 return IEMOP_RAISE_INVALID_OPCODE();
6952
6953 IEM_MC_BEGIN(0, 0);
6954 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6955 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6956 else
6957 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961}
6962
6963
6964/** Opcode 0xf3 0x0f 0xae 11b/0. */
6965FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6966{
6967 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6970 {
6971 IEM_MC_BEGIN(1, 0);
6972 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6973 IEM_MC_ARG(uint64_t, u64Dst, 0);
6974 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6975 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6976 IEM_MC_ADVANCE_RIP();
6977 IEM_MC_END();
6978 }
6979 else
6980 {
6981 IEM_MC_BEGIN(1, 0);
6982 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6983 IEM_MC_ARG(uint32_t, u32Dst, 0);
6984 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6985 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 }
6989 return VINF_SUCCESS;
6990}
6991
6992/** Opcode 0xf3 0x0f 0xae 11b/1. */
6993FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6994{
6995 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6997 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6998 {
6999 IEM_MC_BEGIN(1, 0);
7000 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7001 IEM_MC_ARG(uint64_t, u64Dst, 0);
7002 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7003 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7004 IEM_MC_ADVANCE_RIP();
7005 IEM_MC_END();
7006 }
7007 else
7008 {
7009 IEM_MC_BEGIN(1, 0);
7010 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7011 IEM_MC_ARG(uint32_t, u32Dst, 0);
7012 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7013 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7014 IEM_MC_ADVANCE_RIP();
7015 IEM_MC_END();
7016 }
7017 return VINF_SUCCESS;
7018}
7019
7020/** Opcode 0xf3 0x0f 0xae 11b/2. */
7021FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7022{
7023 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7025 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7026 {
7027 IEM_MC_BEGIN(1, 0);
7028 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7029 IEM_MC_ARG(uint64_t, u64Dst, 0);
7030 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
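            /* Loading a non-canonical base address must raise #GP(0);
             * WRFSBASE/WRGSBASE do not silently truncate the value. */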
7031 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7032 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 }
7036 else
7037 {
7038 IEM_MC_BEGIN(1, 0);
7039 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7040 IEM_MC_ARG(uint32_t, u32Dst, 0);
7041 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7042 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7043 IEM_MC_ADVANCE_RIP();
7044 IEM_MC_END();
7045 }
7046 return VINF_SUCCESS;
7047}
7048
7049/** Opcode 0xf3 0x0f 0xae 11b/3. */
7050FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7051{
7052 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7055 {
7056 IEM_MC_BEGIN(1, 0);
7057 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7058 IEM_MC_ARG(uint64_t, u64Dst, 0);
7059 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7060 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7061 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7062 IEM_MC_ADVANCE_RIP();
7063 IEM_MC_END();
7064 }
7065 else
7066 {
7067 IEM_MC_BEGIN(1, 0);
7068 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7069 IEM_MC_ARG(uint32_t, u32Dst, 0);
7070 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7071 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7072 IEM_MC_ADVANCE_RIP();
7073 IEM_MC_END();
7074 }
7075 return VINF_SUCCESS;
7076}
7077
7078
7079/**
7080 * Group 15 jump table for register variant.
7081 */
7082IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7083{ /* pfx: none, 066h, 0f3h, 0f2h */
7084 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7085 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7086 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7087 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7088 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7089 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7090 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7091 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7092};
7093AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7094
7095
7096/**
7097 * Group 15 jump table for memory variant.
7098 */
7099IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7100{ /* pfx: none, 066h, 0f3h, 0f2h */
7101 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7102 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7103 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7104 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7105 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7106 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7107 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7108 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7109};
7110AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7111
7112
7113/** Opcode 0x0f 0xae. */
7114FNIEMOP_DEF(iemOp_Grp15)
7115{
7116 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
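    /* The jump table index is reg * 4 + prefix, the four columns being the
     * prefixes none, 066h, 0f3h and 0f2h as laid out in the tables above;
     * e.g. F3 0F AE /0 with a register operand dispatches to
     * iemOp_Grp15_rdfsbase. */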
7118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7119 /* register, register */
7120 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7121 + pVCpu->iem.s.idxPrefix], bRm);
7122 /* memory, register */
7123 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7124 + pVCpu->iem.s.idxPrefix], bRm);
7125}
7126
7127
7128/** Opcode 0x0f 0xaf. */
7129FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7130{
7131 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7132 IEMOP_HLP_MIN_386();
7133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7134 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7135}
7136
7137
7138/** Opcode 0x0f 0xb0. */
7139FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7140{
7141 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7142 IEMOP_HLP_MIN_486();
7143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7144
7145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7146 {
7147 IEMOP_HLP_DONE_DECODING();
7148 IEM_MC_BEGIN(4, 0);
7149 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7150 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7151 IEM_MC_ARG(uint8_t, u8Src, 2);
7152 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7153
7154 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7155 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7156 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7157 IEM_MC_REF_EFLAGS(pEFlags);
7158 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7159 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7160 else
7161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7162
7163 IEM_MC_ADVANCE_RIP();
7164 IEM_MC_END();
7165 }
7166 else
7167 {
7168 IEM_MC_BEGIN(4, 3);
7169 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7170 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7171 IEM_MC_ARG(uint8_t, u8Src, 2);
7172 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7174 IEM_MC_LOCAL(uint8_t, u8Al);
7175
7176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7177 IEMOP_HLP_DONE_DECODING();
7178 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7179 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7180 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7181 IEM_MC_FETCH_EFLAGS(EFlags);
7182 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7183 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7185 else
7186 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7187
7188 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7189 IEM_MC_COMMIT_EFLAGS(EFlags);
7190 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7191 IEM_MC_ADVANCE_RIP();
7192 IEM_MC_END();
7193 }
7194 return VINF_SUCCESS;
7195}
7196
7197/** Opcode 0x0f 0xb1. */
7198FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7199{
7200 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7201 IEMOP_HLP_MIN_486();
7202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7203
7204 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7205 {
7206 IEMOP_HLP_DONE_DECODING();
7207 switch (pVCpu->iem.s.enmEffOpSize)
7208 {
7209 case IEMMODE_16BIT:
7210 IEM_MC_BEGIN(4, 0);
7211 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7212 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7213 IEM_MC_ARG(uint16_t, u16Src, 2);
7214 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7215
7216 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7217 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7218 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7219 IEM_MC_REF_EFLAGS(pEFlags);
7220 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7222 else
7223 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7224
7225 IEM_MC_ADVANCE_RIP();
7226 IEM_MC_END();
7227 return VINF_SUCCESS;
7228
7229 case IEMMODE_32BIT:
7230 IEM_MC_BEGIN(4, 0);
7231 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7232 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7233 IEM_MC_ARG(uint32_t, u32Src, 2);
7234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7235
7236 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7237 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7238 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7239 IEM_MC_REF_EFLAGS(pEFlags);
7240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7241 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7242 else
7243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7244
7245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7247 IEM_MC_ADVANCE_RIP();
7248 IEM_MC_END();
7249 return VINF_SUCCESS;
7250
7251 case IEMMODE_64BIT:
7252 IEM_MC_BEGIN(4, 0);
7253 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7254 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
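                /* On 32-bit hosts (RT_ARCH_X86) the 64-bit source operand is passed
                   to the assembly helper by reference rather than by value. */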
7255#ifdef RT_ARCH_X86
7256 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7257#else
7258 IEM_MC_ARG(uint64_t, u64Src, 2);
7259#endif
7260 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7261
7262 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7263 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7264 IEM_MC_REF_EFLAGS(pEFlags);
7265#ifdef RT_ARCH_X86
7266 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7267 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7269 else
7270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7271#else
7272 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7275 else
7276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7277#endif
7278
7279 IEM_MC_ADVANCE_RIP();
7280 IEM_MC_END();
7281 return VINF_SUCCESS;
7282
7283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7284 }
7285 }
7286 else
7287 {
7288 switch (pVCpu->iem.s.enmEffOpSize)
7289 {
7290 case IEMMODE_16BIT:
7291 IEM_MC_BEGIN(4, 3);
7292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7293 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7294 IEM_MC_ARG(uint16_t, u16Src, 2);
7295 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7297 IEM_MC_LOCAL(uint16_t, u16Ax);
7298
7299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7300 IEMOP_HLP_DONE_DECODING();
7301 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7302 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7303 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7304 IEM_MC_FETCH_EFLAGS(EFlags);
7305 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7306 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7307 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7308 else
7309 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7310
7311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7312 IEM_MC_COMMIT_EFLAGS(EFlags);
7313 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7314 IEM_MC_ADVANCE_RIP();
7315 IEM_MC_END();
7316 return VINF_SUCCESS;
7317
7318 case IEMMODE_32BIT:
7319 IEM_MC_BEGIN(4, 3);
7320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7321 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7322 IEM_MC_ARG(uint32_t, u32Src, 2);
7323 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7325 IEM_MC_LOCAL(uint32_t, u32Eax);
7326
7327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7328 IEMOP_HLP_DONE_DECODING();
7329 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7330 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7331 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7332 IEM_MC_FETCH_EFLAGS(EFlags);
7333 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7334 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7335 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7336 else
7337 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7338
7339 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7340 IEM_MC_COMMIT_EFLAGS(EFlags);
7341 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7342 IEM_MC_ADVANCE_RIP();
7343 IEM_MC_END();
7344 return VINF_SUCCESS;
7345
7346 case IEMMODE_64BIT:
7347 IEM_MC_BEGIN(4, 3);
7348 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7349 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7350#ifdef RT_ARCH_X86
7351 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7352#else
7353 IEM_MC_ARG(uint64_t, u64Src, 2);
7354#endif
7355 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7357 IEM_MC_LOCAL(uint64_t, u64Rax);
7358
7359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7360 IEMOP_HLP_DONE_DECODING();
7361 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7362 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7363 IEM_MC_FETCH_EFLAGS(EFlags);
7364 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7365#ifdef RT_ARCH_X86
7366 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7367 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7368 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7369 else
7370 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7371#else
7372 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7373 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7374 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7375 else
7376 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7377#endif
7378
7379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7380 IEM_MC_COMMIT_EFLAGS(EFlags);
7381 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7382 IEM_MC_ADVANCE_RIP();
7383 IEM_MC_END();
7384 return VINF_SUCCESS;
7385
7386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7387 }
7388 }
7389}
7390
7391
7392FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7393{
7394 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7395 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
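    /* The far pointer operand is laid out offset first: the 16/32/64-bit offset
       at +0, followed by the 16-bit selector at +2/+4/+8 respectively. */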
7396
7397 switch (pVCpu->iem.s.enmEffOpSize)
7398 {
7399 case IEMMODE_16BIT:
7400 IEM_MC_BEGIN(5, 1);
7401 IEM_MC_ARG(uint16_t, uSel, 0);
7402 IEM_MC_ARG(uint16_t, offSeg, 1);
7403 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7404 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7405 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7406 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7409 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7410 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7411 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7412 IEM_MC_END();
7413 return VINF_SUCCESS;
7414
7415 case IEMMODE_32BIT:
7416 IEM_MC_BEGIN(5, 1);
7417 IEM_MC_ARG(uint16_t, uSel, 0);
7418 IEM_MC_ARG(uint32_t, offSeg, 1);
7419 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7420 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7421 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7422 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7425 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7426 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7427 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7428 IEM_MC_END();
7429 return VINF_SUCCESS;
7430
7431 case IEMMODE_64BIT:
7432 IEM_MC_BEGIN(5, 1);
7433 IEM_MC_ARG(uint16_t, uSel, 0);
7434 IEM_MC_ARG(uint64_t, offSeg, 1);
7435 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7436 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7437 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7438 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7441 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7442 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7443 else
7444 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7445 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7446 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7447 IEM_MC_END();
7448 return VINF_SUCCESS;
7449
7450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7451 }
7452}
7453
7454
7455/** Opcode 0x0f 0xb2. */
7456FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7457{
7458 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7459 IEMOP_HLP_MIN_386();
7460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7462 return IEMOP_RAISE_INVALID_OPCODE();
7463 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7464}
7465
7466
7467/** Opcode 0x0f 0xb3. */
7468FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7469{
7470 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7471 IEMOP_HLP_MIN_386();
7472 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7473}
7474
7475
7476/** Opcode 0x0f 0xb4. */
7477FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7478{
7479 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7480 IEMOP_HLP_MIN_386();
7481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7482 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7483 return IEMOP_RAISE_INVALID_OPCODE();
7484 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7485}
7486
7487
7488/** Opcode 0x0f 0xb5. */
7489FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7490{
7491 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7492 IEMOP_HLP_MIN_386();
7493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7495 return IEMOP_RAISE_INVALID_OPCODE();
7496 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7497}
7498
7499
7500/** Opcode 0x0f 0xb6. */
7501FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7502{
7503 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7504 IEMOP_HLP_MIN_386();
7505
7506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7507
7508 /*
7509 * If rm is denoting a register, no more instruction bytes.
7510 */
7511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7512 {
7513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7514 switch (pVCpu->iem.s.enmEffOpSize)
7515 {
7516 case IEMMODE_16BIT:
7517 IEM_MC_BEGIN(0, 1);
7518 IEM_MC_LOCAL(uint16_t, u16Value);
7519 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7520 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7521 IEM_MC_ADVANCE_RIP();
7522 IEM_MC_END();
7523 return VINF_SUCCESS;
7524
7525 case IEMMODE_32BIT:
7526 IEM_MC_BEGIN(0, 1);
7527 IEM_MC_LOCAL(uint32_t, u32Value);
7528 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7529 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7530 IEM_MC_ADVANCE_RIP();
7531 IEM_MC_END();
7532 return VINF_SUCCESS;
7533
7534 case IEMMODE_64BIT:
7535 IEM_MC_BEGIN(0, 1);
7536 IEM_MC_LOCAL(uint64_t, u64Value);
7537 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7538 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7539 IEM_MC_ADVANCE_RIP();
7540 IEM_MC_END();
7541 return VINF_SUCCESS;
7542
7543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7544 }
7545 }
7546 else
7547 {
7548 /*
7549 * We're loading a register from memory.
7550 */
7551 switch (pVCpu->iem.s.enmEffOpSize)
7552 {
7553 case IEMMODE_16BIT:
7554 IEM_MC_BEGIN(0, 2);
7555 IEM_MC_LOCAL(uint16_t, u16Value);
7556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7559 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7560 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7561 IEM_MC_ADVANCE_RIP();
7562 IEM_MC_END();
7563 return VINF_SUCCESS;
7564
7565 case IEMMODE_32BIT:
7566 IEM_MC_BEGIN(0, 2);
7567 IEM_MC_LOCAL(uint32_t, u32Value);
7568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7571 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7572 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7573 IEM_MC_ADVANCE_RIP();
7574 IEM_MC_END();
7575 return VINF_SUCCESS;
7576
7577 case IEMMODE_64BIT:
7578 IEM_MC_BEGIN(0, 2);
7579 IEM_MC_LOCAL(uint64_t, u64Value);
7580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7583 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7584 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7585 IEM_MC_ADVANCE_RIP();
7586 IEM_MC_END();
7587 return VINF_SUCCESS;
7588
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591 }
7592}
7593
7594
7595/** Opcode 0x0f 0xb7. */
7596FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7597{
7598 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7599 IEMOP_HLP_MIN_386();
7600
7601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7602
7603 /** @todo Not entirely sure how the operand size prefix is handled here,
7604 * assuming that it will be ignored. Would be nice to have a few
7605 * tests for this. */
7606 /*
7607 * If rm is denoting a register, no more instruction bytes.
7608 */
7609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7610 {
7611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7612 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7613 {
7614 IEM_MC_BEGIN(0, 1);
7615 IEM_MC_LOCAL(uint32_t, u32Value);
7616 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7617 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7618 IEM_MC_ADVANCE_RIP();
7619 IEM_MC_END();
7620 }
7621 else
7622 {
7623 IEM_MC_BEGIN(0, 1);
7624 IEM_MC_LOCAL(uint64_t, u64Value);
7625 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7626 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7627 IEM_MC_ADVANCE_RIP();
7628 IEM_MC_END();
7629 }
7630 }
7631 else
7632 {
7633 /*
7634 * We're loading a register from memory.
7635 */
7636 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7637 {
7638 IEM_MC_BEGIN(0, 2);
7639 IEM_MC_LOCAL(uint32_t, u32Value);
7640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7644 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7645 IEM_MC_ADVANCE_RIP();
7646 IEM_MC_END();
7647 }
7648 else
7649 {
7650 IEM_MC_BEGIN(0, 2);
7651 IEM_MC_LOCAL(uint64_t, u64Value);
7652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7655 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7656 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7657 IEM_MC_ADVANCE_RIP();
7658 IEM_MC_END();
7659 }
7660 }
7661 return VINF_SUCCESS;
7662}
7663
7664
7665/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7666FNIEMOP_UD_STUB(iemOp_jmpe);
7667/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7668FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7669
7670
7671/**
7672 * @opcode 0xb9
7673 * @opinvalid intel-modrm
7674 * @optest ->
7675 */
7676FNIEMOP_DEF(iemOp_Grp10)
7677{
7678 /*
7679 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7680 * the ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7681 */
7682 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7683 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7684 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7685}
7686
7687
7688/** Opcode 0x0f 0xba. */
7689FNIEMOP_DEF(iemOp_Grp8)
7690{
7691 IEMOP_HLP_MIN_386();
7692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7693 PCIEMOPBINSIZES pImpl;
7694 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7695 {
7696 case 0: case 1: case 2: case 3:
7697 /* Both AMD and Intel want full modr/m decoding and imm8. */
7698 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7699 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7700 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7701 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7702 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7704 }
7705 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7706
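    /* The imm8 bit offset is masked to the operand width below (& 0x0f, & 0x1f
       or & 0x3f), so it can never index outside the addressed operand. */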
7707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7708 {
7709 /* register destination. */
7710 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7712
7713 switch (pVCpu->iem.s.enmEffOpSize)
7714 {
7715 case IEMMODE_16BIT:
7716 IEM_MC_BEGIN(3, 0);
7717 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7718 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7719 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7720
7721 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7722 IEM_MC_REF_EFLAGS(pEFlags);
7723 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7724
7725 IEM_MC_ADVANCE_RIP();
7726 IEM_MC_END();
7727 return VINF_SUCCESS;
7728
7729 case IEMMODE_32BIT:
7730 IEM_MC_BEGIN(3, 0);
7731 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7732 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7733 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7734
7735 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7736 IEM_MC_REF_EFLAGS(pEFlags);
7737 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7738
7739 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7740 IEM_MC_ADVANCE_RIP();
7741 IEM_MC_END();
7742 return VINF_SUCCESS;
7743
7744 case IEMMODE_64BIT:
7745 IEM_MC_BEGIN(3, 0);
7746 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7747 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7748 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7749
7750 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7751 IEM_MC_REF_EFLAGS(pEFlags);
7752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7753
7754 IEM_MC_ADVANCE_RIP();
7755 IEM_MC_END();
7756 return VINF_SUCCESS;
7757
7758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7759 }
7760 }
7761 else
7762 {
7763 /* memory destination. */
7764
7765 uint32_t fAccess;
7766 if (pImpl->pfnLockedU16)
7767 fAccess = IEM_ACCESS_DATA_RW;
7768 else /* BT */
7769 fAccess = IEM_ACCESS_DATA_R;
7770
7771 /** @todo test negative bit offsets! */
7772 switch (pVCpu->iem.s.enmEffOpSize)
7773 {
7774 case IEMMODE_16BIT:
7775 IEM_MC_BEGIN(3, 1);
7776 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7777 IEM_MC_ARG(uint16_t, u16Src, 1);
7778 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7780
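                /* The trailing '1' tells the effective address calculation that one
                   immediate byte still follows, which matters for RIP-relative forms. */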
7781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7782 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7783 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7784 if (pImpl->pfnLockedU16)
7785 IEMOP_HLP_DONE_DECODING();
7786 else
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 IEM_MC_FETCH_EFLAGS(EFlags);
7789 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7790 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7791 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7792 else
7793 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7794 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7795
7796 IEM_MC_COMMIT_EFLAGS(EFlags);
7797 IEM_MC_ADVANCE_RIP();
7798 IEM_MC_END();
7799 return VINF_SUCCESS;
7800
7801 case IEMMODE_32BIT:
7802 IEM_MC_BEGIN(3, 1);
7803 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7804 IEM_MC_ARG(uint32_t, u32Src, 1);
7805 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7807
7808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7809 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7810 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7811 if (pImpl->pfnLockedU16)
7812 IEMOP_HLP_DONE_DECODING();
7813 else
7814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7815 IEM_MC_FETCH_EFLAGS(EFlags);
7816 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7818 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7819 else
7820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7821 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7822
7823 IEM_MC_COMMIT_EFLAGS(EFlags);
7824 IEM_MC_ADVANCE_RIP();
7825 IEM_MC_END();
7826 return VINF_SUCCESS;
7827
7828 case IEMMODE_64BIT:
7829 IEM_MC_BEGIN(3, 1);
7830 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7831 IEM_MC_ARG(uint64_t, u64Src, 1);
7832 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7834
7835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7836 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7837 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7838 if (pImpl->pfnLockedU16)
7839 IEMOP_HLP_DONE_DECODING();
7840 else
7841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842 IEM_MC_FETCH_EFLAGS(EFlags);
7843 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7844 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7845 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7846 else
7847 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7848 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7849
7850 IEM_MC_COMMIT_EFLAGS(EFlags);
7851 IEM_MC_ADVANCE_RIP();
7852 IEM_MC_END();
7853 return VINF_SUCCESS;
7854
7855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7856 }
7857 }
7858}
7859
7860
7861/** Opcode 0x0f 0xbb. */
7862FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7863{
7864 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7865 IEMOP_HLP_MIN_386();
7866 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7867}
7868
7869
7870/** Opcode 0x0f 0xbc. */
7871FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7872{
7873 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7874 IEMOP_HLP_MIN_386();
7875 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7876 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7877}
7878
7879
7880/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7881FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7882
7883
7884/** Opcode 0x0f 0xbd. */
7885FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7886{
7887 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7888 IEMOP_HLP_MIN_386();
7889 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7890 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7891}
7892
7893
7894/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7895FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7896
7897
7898/** Opcode 0x0f 0xbe. */
7899FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7900{
7901 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7902 IEMOP_HLP_MIN_386();
7903
7904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7905
7906 /*
7907 * If rm is denoting a register, no more instruction bytes.
7908 */
7909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7910 {
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 switch (pVCpu->iem.s.enmEffOpSize)
7913 {
7914 case IEMMODE_16BIT:
7915 IEM_MC_BEGIN(0, 1);
7916 IEM_MC_LOCAL(uint16_t, u16Value);
7917 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7918 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7919 IEM_MC_ADVANCE_RIP();
7920 IEM_MC_END();
7921 return VINF_SUCCESS;
7922
7923 case IEMMODE_32BIT:
7924 IEM_MC_BEGIN(0, 1);
7925 IEM_MC_LOCAL(uint32_t, u32Value);
7926 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7927 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7928 IEM_MC_ADVANCE_RIP();
7929 IEM_MC_END();
7930 return VINF_SUCCESS;
7931
7932 case IEMMODE_64BIT:
7933 IEM_MC_BEGIN(0, 1);
7934 IEM_MC_LOCAL(uint64_t, u64Value);
7935 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7936 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7937 IEM_MC_ADVANCE_RIP();
7938 IEM_MC_END();
7939 return VINF_SUCCESS;
7940
7941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7942 }
7943 }
7944 else
7945 {
7946 /*
7947 * We're loading a register from memory.
7948 */
7949 switch (pVCpu->iem.s.enmEffOpSize)
7950 {
7951 case IEMMODE_16BIT:
7952 IEM_MC_BEGIN(0, 2);
7953 IEM_MC_LOCAL(uint16_t, u16Value);
7954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7958 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 return VINF_SUCCESS;
7962
7963 case IEMMODE_32BIT:
7964 IEM_MC_BEGIN(0, 2);
7965 IEM_MC_LOCAL(uint32_t, u32Value);
7966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7970 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7971 IEM_MC_ADVANCE_RIP();
7972 IEM_MC_END();
7973 return VINF_SUCCESS;
7974
7975 case IEMMODE_64BIT:
7976 IEM_MC_BEGIN(0, 2);
7977 IEM_MC_LOCAL(uint64_t, u64Value);
7978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7981 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7982 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7983 IEM_MC_ADVANCE_RIP();
7984 IEM_MC_END();
7985 return VINF_SUCCESS;
7986
7987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7988 }
7989 }
7990}
7991
7992
7993/** Opcode 0x0f 0xbf. */
7994FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7995{
7996 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7997 IEMOP_HLP_MIN_386();
7998
7999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8000
8001 /** @todo Not entirely sure how the operand size prefix is handled here,
8002 * assuming that it will be ignored. Would be nice to have a few
8003 * tests for this. */
8004 /*
8005 * If rm is denoting a register, no more instruction bytes.
8006 */
8007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8008 {
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8011 {
8012 IEM_MC_BEGIN(0, 1);
8013 IEM_MC_LOCAL(uint32_t, u32Value);
8014 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8015 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8016 IEM_MC_ADVANCE_RIP();
8017 IEM_MC_END();
8018 }
8019 else
8020 {
8021 IEM_MC_BEGIN(0, 1);
8022 IEM_MC_LOCAL(uint64_t, u64Value);
8023 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8024 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8025 IEM_MC_ADVANCE_RIP();
8026 IEM_MC_END();
8027 }
8028 }
8029 else
8030 {
8031 /*
8032 * We're loading a register from memory.
8033 */
8034 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8035 {
8036 IEM_MC_BEGIN(0, 2);
8037 IEM_MC_LOCAL(uint32_t, u32Value);
8038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8041 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8042 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8043 IEM_MC_ADVANCE_RIP();
8044 IEM_MC_END();
8045 }
8046 else
8047 {
8048 IEM_MC_BEGIN(0, 2);
8049 IEM_MC_LOCAL(uint64_t, u64Value);
8050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8053 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8054 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8055 IEM_MC_ADVANCE_RIP();
8056 IEM_MC_END();
8057 }
8058 }
8059 return VINF_SUCCESS;
8060}
8061
8062
8063/** Opcode 0x0f 0xc0. */
8064FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8065{
8066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8067 IEMOP_HLP_MIN_486();
8068 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
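    /* xadd: the destination receives dst + src while the register source
       receives the original destination value (an exchange-and-add). */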
8069
8070 /*
8071 * If rm is denoting a register, no more instruction bytes.
8072 */
8073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8074 {
8075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8076
8077 IEM_MC_BEGIN(3, 0);
8078 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8079 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8080 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8081
8082 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8083 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8084 IEM_MC_REF_EFLAGS(pEFlags);
8085 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8086
8087 IEM_MC_ADVANCE_RIP();
8088 IEM_MC_END();
8089 }
8090 else
8091 {
8092 /*
8093 * We're accessing memory.
8094 */
8095 IEM_MC_BEGIN(3, 3);
8096 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8097 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8098 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8099 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8101
8102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8103 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8104 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8105 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8106 IEM_MC_FETCH_EFLAGS(EFlags);
8107 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8109 else
8110 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8111
8112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8113 IEM_MC_COMMIT_EFLAGS(EFlags);
8114 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8115 IEM_MC_ADVANCE_RIP();
8116 IEM_MC_END();
8117 return VINF_SUCCESS;
8118 }
8119 return VINF_SUCCESS;
8120}
8121
8122
8123/** Opcode 0x0f 0xc1. */
8124FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8125{
8126 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8127 IEMOP_HLP_MIN_486();
8128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8129
8130 /*
8131 * If rm is denoting a register, no more instruction bytes.
8132 */
8133 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8134 {
8135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8136
8137 switch (pVCpu->iem.s.enmEffOpSize)
8138 {
8139 case IEMMODE_16BIT:
8140 IEM_MC_BEGIN(3, 0);
8141 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8142 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8144
8145 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8146 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8147 IEM_MC_REF_EFLAGS(pEFlags);
8148 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8149
8150 IEM_MC_ADVANCE_RIP();
8151 IEM_MC_END();
8152 return VINF_SUCCESS;
8153
8154 case IEMMODE_32BIT:
8155 IEM_MC_BEGIN(3, 0);
8156 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8157 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8159
8160 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8161 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8162 IEM_MC_REF_EFLAGS(pEFlags);
8163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8164
8165 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8166 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8167 IEM_MC_ADVANCE_RIP();
8168 IEM_MC_END();
8169 return VINF_SUCCESS;
8170
8171 case IEMMODE_64BIT:
8172 IEM_MC_BEGIN(3, 0);
8173 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8174 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8175 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8176
8177 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8178 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8179 IEM_MC_REF_EFLAGS(pEFlags);
8180 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8181
8182 IEM_MC_ADVANCE_RIP();
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185
8186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8187 }
8188 }
8189 else
8190 {
8191 /*
8192 * We're accessing memory.
8193 */
8194 switch (pVCpu->iem.s.enmEffOpSize)
8195 {
8196 case IEMMODE_16BIT:
8197 IEM_MC_BEGIN(3, 3);
8198 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8199 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8200 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8201 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8203
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8205 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8206 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8207 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8208 IEM_MC_FETCH_EFLAGS(EFlags);
8209 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8211 else
8212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8213
8214 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8215 IEM_MC_COMMIT_EFLAGS(EFlags);
8216 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8217 IEM_MC_ADVANCE_RIP();
8218 IEM_MC_END();
8219 return VINF_SUCCESS;
8220
8221 case IEMMODE_32BIT:
8222 IEM_MC_BEGIN(3, 3);
8223 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8224 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8225 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8226 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8228
8229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8230 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8231 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8232 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8233 IEM_MC_FETCH_EFLAGS(EFlags);
8234 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8236 else
8237 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8238
8239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8240 IEM_MC_COMMIT_EFLAGS(EFlags);
8241 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8242 IEM_MC_ADVANCE_RIP();
8243 IEM_MC_END();
8244 return VINF_SUCCESS;
8245
8246 case IEMMODE_64BIT:
8247 IEM_MC_BEGIN(3, 3);
8248 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8249 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8250 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8251 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8253
8254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8255 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8256 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8257 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8258 IEM_MC_FETCH_EFLAGS(EFlags);
8259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8260 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8261 else
8262 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8263
8264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8265 IEM_MC_COMMIT_EFLAGS(EFlags);
8266 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8267 IEM_MC_ADVANCE_RIP();
8268 IEM_MC_END();
8269 return VINF_SUCCESS;
8270
8271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8272 }
8273 }
8274}
8275
8276
8277/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8278FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8279/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8280FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8281/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8282FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8283/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8284FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8285
8286
8287/** Opcode 0x0f 0xc3. */
8288FNIEMOP_DEF(iemOp_movnti_My_Gy)
8289{
8290 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8291
8292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8293
8294 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8295 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8296 {
8297 switch (pVCpu->iem.s.enmEffOpSize)
8298 {
8299 case IEMMODE_32BIT:
8300 IEM_MC_BEGIN(0, 2);
8301 IEM_MC_LOCAL(uint32_t, u32Value);
8302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8303
8304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8307 return IEMOP_RAISE_INVALID_OPCODE();
8308
8309 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8310 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8311 IEM_MC_ADVANCE_RIP();
8312 IEM_MC_END();
8313 break;
8314
8315 case IEMMODE_64BIT:
8316 IEM_MC_BEGIN(0, 2);
8317 IEM_MC_LOCAL(uint64_t, u64Value);
8318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8319
8320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8323 return IEMOP_RAISE_INVALID_OPCODE();
8324
8325 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8326 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8327 IEM_MC_ADVANCE_RIP();
8328 IEM_MC_END();
8329 break;
8330
8331 case IEMMODE_16BIT:
8332 /** @todo check this form. */
8333 return IEMOP_RAISE_INVALID_OPCODE();
8334 }
8335 }
8336 else
8337 return IEMOP_RAISE_INVALID_OPCODE();
8338 return VINF_SUCCESS;
8339}
8340/* Opcode 0x66 0x0f 0xc3 - invalid */
8341/* Opcode 0xf3 0x0f 0xc3 - invalid */
8342/* Opcode 0xf2 0x0f 0xc3 - invalid */
8343
8344/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8345FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8346/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8347FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8348/* Opcode 0xf3 0x0f 0xc4 - invalid */
8349/* Opcode 0xf2 0x0f 0xc4 - invalid */
8350
8351/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8352FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8353/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8354FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8355/* Opcode 0xf3 0x0f 0xc5 - invalid */
8356/* Opcode 0xf2 0x0f 0xc5 - invalid */
8357
8358/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8359FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8360/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8361FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8362/* Opcode 0xf3 0x0f 0xc6 - invalid */
8363/* Opcode 0xf2 0x0f 0xc6 - invalid */
8364
8365
8366/** Opcode 0x0f 0xc7 !11/1. */
8367FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8368{
8369 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
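    /* cmpxchg8b: if EDX:EAX equals the 64-bit memory operand, ZF is set and
       ECX:EBX is stored; otherwise ZF is clear and EDX:EAX gets the old value. */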
8370
8371 IEM_MC_BEGIN(4, 3);
8372 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8373 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8374 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8375 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8376 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8377 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8379
8380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8381 IEMOP_HLP_DONE_DECODING();
8382 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8383
8384 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8385 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8386 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8387
8388 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8389 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8390 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8391
8392 IEM_MC_FETCH_EFLAGS(EFlags);
8393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8395 else
8396 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8397
8398 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8399 IEM_MC_COMMIT_EFLAGS(EFlags);
8400 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8401 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8402 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8403 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8404 IEM_MC_ENDIF();
8405 IEM_MC_ADVANCE_RIP();
8406
8407 IEM_MC_END();
8408 return VINF_SUCCESS;
8409}
8410
8411
8412/** Opcode REX.W 0x0f 0xc7 !11/1. */
8413FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8414{
8415 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8416 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8417 {
8418#if 0
8419 RT_NOREF(bRm);
8420 IEMOP_BITCH_ABOUT_STUB();
8421 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8422#else
8423 IEM_MC_BEGIN(4, 3);
8424 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8425 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8426 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8427 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8428 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8429 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431
8432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8433 IEMOP_HLP_DONE_DECODING();
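        /* cmpxchg16b requires a 16-byte aligned memory operand, raising #GP(0) otherwise. */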
8434 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8435 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8436
8437 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8438 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8439 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8440
8441 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8442 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8443 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8444
8445 IEM_MC_FETCH_EFLAGS(EFlags);
8446# ifdef RT_ARCH_AMD64
8447 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8448 {
8449 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8450 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8451 else
8452 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8453 }
8454 else
8455# endif
8456 {
8457 /* Note! The fallback for 32-bit systems and systems without CX16 uses
8458 multiple accesses and is not fully atomic, which works fine in a
8459 uni-CPU guest configuration (ignoring DMA). If guest SMP is active
8460 we have no choice but to use a rendezvous callback here. Sigh. */
8461 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8463 else
8464 {
8465 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8466 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8467 }
8468 }
8469
8470 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8471 IEM_MC_COMMIT_EFLAGS(EFlags);
8472 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8473 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8474 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8475 IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP();
8477
8478 IEM_MC_END();
8479 return VINF_SUCCESS;
8480#endif
8481 }
8482 Log(("cmpxchg16b -> #UD\n"));
8483 return IEMOP_RAISE_INVALID_OPCODE();
8484}
8485
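/** Opcode 0x0f 0xc7 !11/1 - dispatches on REX.W: cmpxchg16b if set, cmpxchg8b otherwise. */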
8486FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8487{
8488 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8489 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8490 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8491}
8492
8493/** Opcode 0x0f 0xc7 11/6. */
8494FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8495
8496/** Opcode 0x0f 0xc7 !11/6. */
8497FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8498
8499/** Opcode 0x66 0x0f 0xc7 !11/6. */
8500FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8501
8502/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8503FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8504
8505/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8506FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8507
8508/** Opcode 0x0f 0xc7 11/7. */
8509FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8510
8511
8512/**
8513 * Group 9 jump table for register variant.
8514 */
8515IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8516{ /* pfx: none, 066h, 0f3h, 0f2h */
8517 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8518 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8519 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8520 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8521 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8522 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8523 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8524 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8525};
8526AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8527
8528
8529/**
8530 * Group 9 jump table for memory variant.
8531 */
8532IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8533{ /* pfx: none, 066h, 0f3h, 0f2h */
8534 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8535 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8536 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8537 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8538 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8539 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8540 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8541 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8542};
8543AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8544
8545
8546/** Opcode 0x0f 0xc7. */
8547FNIEMOP_DEF(iemOp_Grp9)
8548{
8549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8551 /* register, register */
8552 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8553 + pVCpu->iem.s.idxPrefix], bRm);
8554 /* memory, register */
8555 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8556 + pVCpu->iem.s.idxPrefix], bRm);
8557}
8558
8559
8560/**
8561 * Common 'bswap register' helper.
8562 */
8563FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8564{
8565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8566 switch (pVCpu->iem.s.enmEffOpSize)
8567 {
8568 case IEMMODE_16BIT:
8569 IEM_MC_BEGIN(1, 0);
8570 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8571 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
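            /* Note: bswap with a 16-bit operand is undefined per the Intel SDM;
               the u16 helper implements one concrete behaviour on the low word. */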
8572 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8573 IEM_MC_ADVANCE_RIP();
8574 IEM_MC_END();
8575 return VINF_SUCCESS;
8576
8577 case IEMMODE_32BIT:
8578 IEM_MC_BEGIN(1, 0);
8579 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8580 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8581 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8582 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8583 IEM_MC_ADVANCE_RIP();
8584 IEM_MC_END();
8585 return VINF_SUCCESS;
8586
8587 case IEMMODE_64BIT:
8588 IEM_MC_BEGIN(1, 0);
8589 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8590 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8591 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8592 IEM_MC_ADVANCE_RIP();
8593 IEM_MC_END();
8594 return VINF_SUCCESS;
8595
8596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8597 }
8598}
8599
8600
8601/** Opcode 0x0f 0xc8. */
8602FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8603{
8604 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8605 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
8606 REX.X prefix. It appears REX.B is actually the correct prefix. For a
8607 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8608 IEMOP_HLP_MIN_486();
8609 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8610}
8611
8612
8613/** Opcode 0x0f 0xc9. */
8614FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8615{
8616 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8617 IEMOP_HLP_MIN_486();
8618 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8619}
8620
8621
8622/** Opcode 0x0f 0xca. */
8623FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8624{
8625 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8626 IEMOP_HLP_MIN_486();
8627 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8628}
8629
8630
8631/** Opcode 0x0f 0xcb. */
8632FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8633{
8634 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8635 IEMOP_HLP_MIN_486();
8636 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8637}
8638
8639
8640/** Opcode 0x0f 0xcc. */
8641FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8642{
8643 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8644 IEMOP_HLP_MIN_486();
8645 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8646}
8647
8648
8649/** Opcode 0x0f 0xcd. */
8650FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8651{
8652 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8653 IEMOP_HLP_MIN_486();
8654 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8655}
8656
8657
8658/** Opcode 0x0f 0xce. */
8659FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8660{
8661 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8662 IEMOP_HLP_MIN_486();
8663 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8664}
8665
8666
8667/** Opcode 0x0f 0xcf. */
8668FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8669{
8670 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8671 IEMOP_HLP_MIN_486();
8672 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8673}
8674
8675
8676/* Opcode 0x0f 0xd0 - invalid */
8677/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8678FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8679/* Opcode 0xf3 0x0f 0xd0 - invalid */
8680/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8681FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8682
8683/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8684FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8685/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8686FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8687/* Opcode 0xf3 0x0f 0xd1 - invalid */
8688/* Opcode 0xf2 0x0f 0xd1 - invalid */
8689
8690/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8691FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8692/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8693FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8694/* Opcode 0xf3 0x0f 0xd2 - invalid */
8695/* Opcode 0xf2 0x0f 0xd2 - invalid */
8696
8697/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8698FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8699/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8700FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8701/* Opcode 0xf3 0x0f 0xd3 - invalid */
8702/* Opcode 0xf2 0x0f 0xd3 - invalid */
8703
8704/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8705FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8706/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8707FNIEMOP_STUB(iemOp_paddq_Vx_W);
8708/* Opcode 0xf3 0x0f 0xd4 - invalid */
8709/* Opcode 0xf2 0x0f 0xd4 - invalid */
8710
8711/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8712FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8713/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8714FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8715/* Opcode 0xf3 0x0f 0xd5 - invalid */
8716/* Opcode 0xf2 0x0f 0xd5 - invalid */
8717
8718/* Opcode 0x0f 0xd6 - invalid */
8719
8720/**
8721 * @opcode 0xd6
8722 * @oppfx 0x66
8723 * @opcpuid sse2
8724 * @opgroup og_sse2_pcksclr_datamove
8725 * @opxcpttype none
8726 * @optest op1=-1 op2=2 -> op1=2
8727 * @optest op1=0 op2=-42 -> op1=-42
8728 */
8729FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8730{
8731 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8734 {
8735 /*
8736 * Register, register.
8737 */
8738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8739 IEM_MC_BEGIN(0, 2);
8740 IEM_MC_LOCAL(uint64_t, uSrc);
8741
8742 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8743 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8744
8745 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8746 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8747
8748 IEM_MC_ADVANCE_RIP();
8749 IEM_MC_END();
8750 }
8751 else
8752 {
8753 /*
8754 * Memory, register.
8755 */
8756 IEM_MC_BEGIN(0, 2);
8757 IEM_MC_LOCAL(uint64_t, uSrc);
8758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8759
8760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8762 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8764
8765 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8767
8768 IEM_MC_ADVANCE_RIP();
8769 IEM_MC_END();
8770 }
8771 return VINF_SUCCESS;
8772}
8773
8774
8775/**
8776 * @opcode 0xd6
8777 * @opcodesub 11 mr/reg
8778 * @oppfx f3
8779 * @opcpuid sse2
8780 * @opgroup og_sse2_simdint_datamove
8781 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8782 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8783 */
8784FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8785{
8786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8788 {
8789 /*
8790 * Register, register.
8791 */
8792 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8794 IEM_MC_BEGIN(0, 1);
8795 IEM_MC_LOCAL(uint64_t, uSrc);
8796
8797 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8798 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8799
8800 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8801 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8802 IEM_MC_FPU_TO_MMX_MODE();
8803
8804 IEM_MC_ADVANCE_RIP();
8805 IEM_MC_END();
8806 return VINF_SUCCESS;
8807 }
8808
8809 /**
8810 * @opdone
8811 * @opmnemonic udf30fd6mem
8812 * @opcode 0xd6
8813 * @opcodesub !11 mr/reg
8814 * @oppfx f3
8815 * @opunused intel-modrm
8816 * @opcpuid sse
8817 * @optest ->
8818 */
8819 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8820}
8821
8822
8823/**
8824 * @opcode 0xd6
8825 * @opcodesub 11 mr/reg
8826 * @oppfx f2
8827 * @opcpuid sse2
8828 * @opgroup og_sse2_simdint_datamove
8829 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8830 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8831 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8832 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8833 * @optest op1=-42 op2=0xfedcba9876543210
8834 * -> op1=0xfedcba9876543210 ftw=0xff
8835 */
8836FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8837{
8838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8840 {
8841 /*
8842 * Register, register.
8843 */
8844 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846 IEM_MC_BEGIN(0, 1);
8847 IEM_MC_LOCAL(uint64_t, uSrc);
8848
8849 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8850 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8851
8852 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8853 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8854 IEM_MC_FPU_TO_MMX_MODE();
8855
8856 IEM_MC_ADVANCE_RIP();
8857 IEM_MC_END();
8858 return VINF_SUCCESS;
8859 }
8860
8861 /**
8862 * @opdone
8863 * @opmnemonic udf20fd6mem
8864 * @opcode 0xd6
8865 * @opcodesub !11 mr/reg
8866 * @oppfx f2
8867 * @opunused intel-modrm
8868 * @opcpuid sse
8869 * @optest ->
8870 */
8871 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8872}
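/*
 * Illustrative sketch, not part of the decoder and not built: the
 * register-only MOVQ2DQ/MOVDQ2Q pair above moves the low quadword
 * between the MMX and XMM register files and switches the FPU to MMX
 * mode, so the abridged tag word becomes 0xff as the @optest lines
 * expect. Simplified model in plain C:
 */
#if 0 /* documentation-only sketch */
static void sketchMovq2dq(uint64_t pauXmmDst[2], uint64_t uMmxSrc, uint8_t *pbFtw)
{
    pauXmmDst[0] = uMmxSrc; /* low quadword from the MMX register */
    pauXmmDst[1] = 0;       /* zero-extended to 128 bits */
    *pbFtw      = 0xff;     /* IEM_MC_FPU_TO_MMX_MODE: all tags valid */
}

static void sketchMovdq2q(uint64_t *puMmxDst, uint64_t const pauXmmSrc[2], uint8_t *pbFtw)
{
    *puMmxDst = pauXmmSrc[0]; /* low quadword of the XMM register */
    *pbFtw    = 0xff;
}
#endif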
8873
8874/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8875FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8876{
8877 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8878 /** @todo testcase: Check that the instruction implicitly clears the high
8879 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8880 * and the opcode is modified to work with the whole width (not
8881 * just 128 bits). */
8882 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8883 /* Docs say register only. */
8884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8885 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8886 {
8887 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8888 IEM_MC_BEGIN(2, 0);
8889 IEM_MC_ARG(uint64_t *, pDst, 0);
8890 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8891 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8892 IEM_MC_PREPARE_FPU_USAGE();
8893 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8894 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8895 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8896 IEM_MC_ADVANCE_RIP();
8897 IEM_MC_END();
8898 return VINF_SUCCESS;
8899 }
8900 return IEMOP_RAISE_INVALID_OPCODE();
8901}
8902
8903/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8904FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8905{
8906 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8907 /** @todo testcase: Check that the instruction implicitly clears the high
8908 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8909 * and the opcode is modified to work with the whole width (not
8910 * just 128 bits). */
8911 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd, Ux");
8912 /* Docs say register only. */
8913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8914 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8915 {
8916 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8917 IEM_MC_BEGIN(2, 0);
8918 IEM_MC_ARG(uint64_t *, pDst, 0);
8919 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8921 IEM_MC_PREPARE_SSE_USAGE();
8922 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8923 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8924 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8925 IEM_MC_ADVANCE_RIP();
8926 IEM_MC_END();
8927 return VINF_SUCCESS;
8928 }
8929 return IEMOP_RAISE_INVALID_OPCODE();
8930}
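/*
 * Illustrative sketch, not part of the decoder and not built: both
 * workers above implement PMOVMSKB, which gathers the most significant
 * bit of each source byte into the low bits of the result. The MMX form
 * yields an 8-bit mask and the SSE2 form a 16-bit mask, so a 64-bit GREG
 * destination naturally ends up with its high bits zero (cf. the
 * testcase todos above).
 */
#if 0 /* documentation-only sketch */
static uint32_t sketchPmovmskb(uint8_t const *pbSrc, unsigned cbSrc /* 8 or 16 */)
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < cbSrc; i++)
        fMask |= (uint32_t)(pbSrc[i] >> 7) << i; /* MSB of byte i -> bit i */
    return fMask;
}
#endif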
8931
8932/* Opcode 0xf3 0x0f 0xd7 - invalid */
8933/* Opcode 0xf2 0x0f 0xd7 - invalid */
8934
8935
8936/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8937FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8938/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
8939FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8940/* Opcode 0xf3 0x0f 0xd8 - invalid */
8941/* Opcode 0xf2 0x0f 0xd8 - invalid */
8942
8943/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8944FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8945/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8946FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8947/* Opcode 0xf3 0x0f 0xd9 - invalid */
8948/* Opcode 0xf2 0x0f 0xd9 - invalid */
8949
8950/** Opcode 0x0f 0xda - pminub Pq, Qq */
8951FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8952/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8953FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8954/* Opcode 0xf3 0x0f 0xda - invalid */
8955/* Opcode 0xf2 0x0f 0xda - invalid */
8956
8957/** Opcode 0x0f 0xdb - pand Pq, Qq */
8958FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8959/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
8960FNIEMOP_STUB(iemOp_pand_Vx_W);
8961/* Opcode 0xf3 0x0f 0xdb - invalid */
8962/* Opcode 0xf2 0x0f 0xdb - invalid */
8963
8964/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8965FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8966/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8967FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8968/* Opcode 0xf3 0x0f 0xdc - invalid */
8969/* Opcode 0xf2 0x0f 0xdc - invalid */
8970
8971/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8972FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8973/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8974FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8975/* Opcode 0xf3 0x0f 0xdd - invalid */
8976/* Opcode 0xf2 0x0f 0xdd - invalid */
8977
8978/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8979FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8980/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
8981FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8982/* Opcode 0xf3 0x0f 0xde - invalid */
8983/* Opcode 0xf2 0x0f 0xde - invalid */
8984
8985/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8986FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8987/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8988FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8989/* Opcode 0xf3 0x0f 0xdf - invalid */
8990/* Opcode 0xf2 0x0f 0xdf - invalid */
8991
8992/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8993FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8994/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8995FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8996/* Opcode 0xf3 0x0f 0xe0 - invalid */
8997/* Opcode 0xf2 0x0f 0xe0 - invalid */
8998
8999/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9000FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9001/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
9002FNIEMOP_STUB(iemOp_psraw_Vx_W);
9003/* Opcode 0xf3 0x0f 0xe1 - invalid */
9004/* Opcode 0xf2 0x0f 0xe1 - invalid */
9005
9006/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9007FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9008/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9009FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9010/* Opcode 0xf3 0x0f 0xe2 - invalid */
9011/* Opcode 0xf2 0x0f 0xe2 - invalid */
9012
9013/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9014FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9015/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9016FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9017/* Opcode 0xf3 0x0f 0xe3 - invalid */
9018/* Opcode 0xf2 0x0f 0xe3 - invalid */
9019
9020/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9021FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9022/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
9023FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9024/* Opcode 0xf3 0x0f 0xe4 - invalid */
9025/* Opcode 0xf2 0x0f 0xe4 - invalid */
9026
9027/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9028FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9029/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9030FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9031/* Opcode 0xf3 0x0f 0xe5 - invalid */
9032/* Opcode 0xf2 0x0f 0xe5 - invalid */
9033
9034/* Opcode 0x0f 0xe6 - invalid */
9035/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9036FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9037/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9038FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9039/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9040FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9041
9042
9043/**
9044 * @opcode 0xe7
9045 * @opcodesub !11 mr/reg
9046 * @oppfx none
9047 * @opcpuid sse
9048 * @opgroup og_sse1_cachect
9049 * @opxcpttype none
9050 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9051 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9052 */
9053FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9054{
9055 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9057 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9058 {
9059 /* Register, memory. */
9060 IEM_MC_BEGIN(0, 2);
9061 IEM_MC_LOCAL(uint64_t, uSrc);
9062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9063
9064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9066 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9067 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9068
9069 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9071 IEM_MC_FPU_TO_MMX_MODE();
9072
9073 IEM_MC_ADVANCE_RIP();
9074 IEM_MC_END();
9075 return VINF_SUCCESS;
9076 }
9077 /**
9078 * @opdone
9079 * @opmnemonic ud0fe7reg
9080 * @opcode 0xe7
9081 * @opcodesub 11 mr/reg
9082 * @oppfx none
9083 * @opunused immediate
9084 * @opcpuid sse
9085 * @optest ->
9086 */
9087 return IEMOP_RAISE_INVALID_OPCODE();
9088}
9089
9090/**
9091 * @opcode 0xe7
9092 * @opcodesub !11 mr/reg
9093 * @oppfx 0x66
9094 * @opcpuid sse2
9095 * @opgroup og_sse2_cachect
9096 * @opxcpttype 1
9097 * @optest op1=-1 op2=2 -> op1=2
9098 * @optest op1=0 op2=-42 -> op1=-42
9099 */
9100FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9101{
9102 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9104 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9105 {
9106 /* Register, memory. */
9107 IEM_MC_BEGIN(0, 2);
9108 IEM_MC_LOCAL(RTUINT128U, uSrc);
9109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9110
9111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9113 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9115
9116 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9117 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9118
9119 IEM_MC_ADVANCE_RIP();
9120 IEM_MC_END();
9121 return VINF_SUCCESS;
9122 }
9123
9124 /**
9125 * @opdone
9126 * @opmnemonic ud660fe7reg
9127 * @opcode 0xe7
9128 * @opcodesub 11 mr/reg
9129 * @oppfx 0x66
9130 * @opunused immediate
9131 * @opcpuid sse
9132 * @optest ->
9133 */
9134 return IEMOP_RAISE_INVALID_OPCODE();
9135}
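/*
 * Illustrative sketch, not part of the decoder and not built: unlike
 * MOVNTQ above, MOVNTDQ requires a 16-byte aligned destination, which is
 * what IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces (fault delivery details
 * simplified here):
 */
#if 0 /* documentation-only sketch */
static int sketchMovntdqDstAligned(uint64_t GCPtrDst)
{
    return (GCPtrDst & 15) == 0; /* must be 16-byte aligned, else #GP(0) */
}
#endif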
9136
9137/* Opcode 0xf3 0x0f 0xe7 - invalid */
9138/* Opcode 0xf2 0x0f 0xe7 - invalid */
9139
9140
9141/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9142FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9143/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
9144FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9145/* Opcode 0xf3 0x0f 0xe8 - invalid */
9146/* Opcode 0xf2 0x0f 0xe8 - invalid */
9147
9148/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9149FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9150/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9151FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9152/* Opcode 0xf3 0x0f 0xe9 - invalid */
9153/* Opcode 0xf2 0x0f 0xe9 - invalid */
9154
9155/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9156FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9157/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9158FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9159/* Opcode 0xf3 0x0f 0xea - invalid */
9160/* Opcode 0xf2 0x0f 0xea - invalid */
9161
9162/** Opcode 0x0f 0xeb - por Pq, Qq */
9163FNIEMOP_STUB(iemOp_por_Pq_Qq);
9164/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
9165FNIEMOP_STUB(iemOp_por_Vx_W);
9166/* Opcode 0xf3 0x0f 0xeb - invalid */
9167/* Opcode 0xf2 0x0f 0xeb - invalid */
9168
9169/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9170FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9171/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9172FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9173/* Opcode 0xf3 0x0f 0xec - invalid */
9174/* Opcode 0xf2 0x0f 0xec - invalid */
9175
9176/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9177FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9178/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9179FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9180/* Opcode 0xf3 0x0f 0xed - invalid */
9181/* Opcode 0xf2 0x0f 0xed - invalid */
9182
9183/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9184FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9185/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
9186FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9187/* Opcode 0xf3 0x0f 0xee - invalid */
9188/* Opcode 0xf2 0x0f 0xee - invalid */
9189
9190
9191/** Opcode 0x0f 0xef - pxor Pq, Qq */
9192FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9193{
9194 IEMOP_MNEMONIC(pxor, "pxor");
9195 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9196}
9197
9198/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9199FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9200{
9201 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9202 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9203}
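/*
 * Illustrative sketch, not part of the decoder and not built: PXOR is a
 * plain full-width XOR, which is why both forms above can share the
 * g_iemAImpl_pxor worker via the common MMX/SSE2 dispatchers. The
 * 128-bit variant reduces to:
 */
#if 0 /* documentation-only sketch */
static void sketchPxorU128(uint64_t pauDst[2], uint64_t const pauSrc[2])
{
    pauDst[0] ^= pauSrc[0];
    pauDst[1] ^= pauSrc[1];
}
#endif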
9204
9205/* Opcode 0xf3 0x0f 0xef - invalid */
9206/* Opcode 0xf2 0x0f 0xef - invalid */
9207
9208/* Opcode 0x0f 0xf0 - invalid */
9209/* Opcode 0x66 0x0f 0xf0 - invalid */
9210/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9211FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9212
9213/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9214FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9215/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
9216FNIEMOP_STUB(iemOp_psllw_Vx_W);
9217/* Opcode 0xf2 0x0f 0xf1 - invalid */
9218
9219/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9220FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9221/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9222FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9223/* Opcode 0xf2 0x0f 0xf2 - invalid */
9224
9225/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9226FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9227/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9228FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9229/* Opcode 0xf2 0x0f 0xf3 - invalid */
9230
9231/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9232FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9233/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
9234FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9235/* Opcode 0xf2 0x0f 0xf4 - invalid */
9236
9237/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9238FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9239/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9240FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9241/* Opcode 0xf2 0x0f 0xf5 - invalid */
9242
9243/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9244FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9245/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9246FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9247/* Opcode 0xf2 0x0f 0xf6 - invalid */
9248
9249/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9250FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9251/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9252FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9253/* Opcode 0xf2 0x0f 0xf7 - invalid */
9254
9255/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9256FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9257/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
9258FNIEMOP_STUB(iemOp_psubb_Vx_W);
9259/* Opcode 0xf2 0x0f 0xf8 - invalid */
9260
9261/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9262FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9263/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9264FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9265/* Opcode 0xf2 0x0f 0xf9 - invalid */
9266
9267/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9268FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9269/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9270FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9271/* Opcode 0xf2 0x0f 0xfa - invalid */
9272
9273/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9274FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9275/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
9276FNIEMOP_STUB(iemOp_psubq_Vx_W);
9277/* Opcode 0xf2 0x0f 0xfb - invalid */
9278
9279/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9280FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9281/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9282FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9283/* Opcode 0xf2 0x0f 0xfc - invalid */
9284
9285/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9286FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9287/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9288FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9289/* Opcode 0xf2 0x0f 0xfd - invalid */
9290
9291/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9292FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9293/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
9294FNIEMOP_STUB(iemOp_paddd_Vx_W);
9295/* Opcode 0xf2 0x0f 0xfe - invalid */
9296
9297
9298/** Opcode 0x0f 0xff - UD0 */
9299FNIEMOP_DEF(iemOp_ud0)
9300{
9301 IEMOP_MNEMONIC(ud0, "ud0");
9302 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9303 {
9304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9305#ifndef TST_IEM_CHECK_MC
9306 RTGCPTR GCPtrEff;
9307 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9308 if (rcStrict != VINF_SUCCESS)
9309 return rcStrict;
9310#endif
9311 IEMOP_HLP_DONE_DECODING();
9312 }
9313 return IEMOP_RAISE_INVALID_OPCODE();
9314}
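/*
 * Illustrative sketch, not part of the decoder and not built: on Intel
 * CPUs UD0 consumes a ModRM byte plus any SIB/displacement, so the
 * reported instruction length varies; the function above models this by
 * fetching bRm and evaluating the effective address before raising #UD.
 * A minimal length model for 32-bit addressing (prefixes and the
 * mod=0/SIB-base=5 disp32 case left out):
 */
#if 0 /* documentation-only sketch */
static unsigned sketchUd0Length32(uint8_t bRm)
{
    unsigned       cb   = 3;    /* 0f ff /r */
    unsigned const iMod = bRm >> 6;
    unsigned const iRm  = bRm & 7;
    if (iMod != 3 && iRm == 4)
        cb += 1;                /* SIB byte */
    if (iMod == 1)
        cb += 1;                /* disp8 */
    else if (iMod == 2 || (iMod == 0 && iRm == 5))
        cb += 4;                /* disp32 */
    return cb;
}
#endif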
9315
9316
9317
9318/**
9319 * Two byte opcode map, first byte 0x0f.
9320 *
9321 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9322 * check if it needs updating as well when making changes.
9323 */
9324IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9325{
9326 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9327 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9328 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9329 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9330 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9331 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9332 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9333 /* 0x06 */ IEMOP_X4(iemOp_clts),
9334 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9335 /* 0x08 */ IEMOP_X4(iemOp_invd),
9336 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9337 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9338 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9339 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9340 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9341 /* 0x0e */ IEMOP_X4(iemOp_femms),
9342 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9343
9344 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9345 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9346 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9347 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9348 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9349 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9350 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9351 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9352 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9353 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9354 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9355 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9356 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9357 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9358 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9359 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9360
9361 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9362 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9363 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9364 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9365 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9366 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9367 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9368 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9369 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9370 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9371 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9372 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9373 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9374 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9375 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9376 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9377
9378 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9379 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9380 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9381 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9382 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9383 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9384 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9385 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9386 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9387 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9388 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9389 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9390 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9391 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9392 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9393 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9394
9395 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9396 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9397 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9398 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9399 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9400 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9401 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9402 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9403 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9404 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9405 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9406 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9407 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9408 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9409 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9410 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9411
9412 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9413 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9414 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9415 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9416 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9417 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9418 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9419 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9420 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9421 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9422 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9423 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9424 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9425 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9426 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9427 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9428
9429 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9430 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9431 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9432 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9433 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9434 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9435 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9436 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9437 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9438 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9439 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9440 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9441 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9442 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9443 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9444 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9445
9446 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9447 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9448 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9449 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9450 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9451 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9452 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9453 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9454
9455 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9456 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9457 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9458 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9459 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9460 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9461 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9462 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9463
9464 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9465 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9466 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9467 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9468 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9469 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9470 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9471 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9472 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9473 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9474 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9475 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9476 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9477 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9478 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9479 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9480
9481 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9482 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9483 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9484 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9485 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9486 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9487 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9488 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9489 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9490 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9491 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9492 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9493 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9494 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9495 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9496 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9497
9498 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9499 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9500 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9501 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9502 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9503 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9504 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9505 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9506 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9507 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9508 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9509 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9510 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9511 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9512 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9513 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9514
9515 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9516 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9517 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9518 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9519 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9520 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9521 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9522 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9523 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9524 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9525 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9526 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9527 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9528 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9529 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9530 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9531
9532 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9533 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9534 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9535 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9536 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9537 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9538 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9539 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9540 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9541 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9542 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9543 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9544 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9545 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9546 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9547 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9548
9549 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9550 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9551 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9552 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9553 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9554 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9555 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9556 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9557 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9558 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9559 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9560 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9561 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9562 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9563 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9564 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9565
9566 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9567 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9568 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9569 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9570 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9571 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9572 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9573 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9574 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9575 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9576 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9577 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9578 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9579 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9580 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9581 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9582
9583 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9584 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9585 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9586 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9587 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9588 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9589 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9590 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9591 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9592 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9593 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9594 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9595 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9596 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9597 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9598 /* 0xff */ IEMOP_X4(iemOp_ud0),
9599};
9600AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
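/*
 * Illustrative sketch, not part of the decoder and not built: the map
 * holds four entries per opcode byte in the column order noted above (no
 * prefix, 066h, f3h, f2h), hence the 1024 entries asserted. A dispatcher
 * would index it roughly like this (idxPrefix being 0..3; the real
 * decoder's prefix bookkeeping is more involved):
 */
#if 0 /* documentation-only sketch */
static PFNIEMOP sketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif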
9601
9602/** @} */
9603