
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@65774

Last change on this file was revision 65774, checked in by vboxsync, 8 years ago:

IEM: Use prefix indexed tables for group 13.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65774 2017-02-13 15:45:53Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
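
/*
 * Note on ModRM decoding (standard x86 encoding, summarized here for
 * convenience; the layout below comes from the architecture, via the
 * X86_MODRM_* masks and shifts this file uses, not from this file itself):
 *
 *      7 6     5 4 3    2 1 0
 *     +-----+---------+-------+
 *     | mod |   reg   |  r/m  |
 *     +-----+---------+-------+
 *
 * mod == 3 selects the register operand form, anything else a memory
 * operand; hence the recurring test
 * (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) below.
 */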
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5, common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
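
/*
 * Worked dispatch example (illustrative only, not from the original source):
 * 'sldt ax' encodes as 0F 00 C0, so bRm is 0xC0;
 * (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK yields 0 and g_apfnGroup6
 * dispatches to iemOp_Grp6_sldt, where mod == 3 selects the register variant
 * that stores LDTR into AX.
 */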
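
/*
 * Note on the OR masks above (restating what the code models, not quoting
 * the CPU manuals): the 286 target reports MSW bits 4..15 as set (0xfff0),
 * the 386 target bits 5..15 (0xffe0), and anything newer than
 * IEMTARGETCPU_386 returns the CR0 bits unmodified.
 */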
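
/*
 * Register-form note (illustrative): with mod == 3 several Group 7 encodings
 * use the r/m field as a sub-opcode, e.g. 0F 01 C1 (reg=0, rm=1) is vmcall
 * and 0F 01 D0 (reg=2, rm=0) is xgetbv, which is exactly what the switch
 * above decodes.
 */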

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
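
/*
 * Encoding note (standard 3DNow! format, added for reference): these
 * instructions use the two-byte escape 0F 0F plus an opcode suffix byte
 * (0F 0F /r ib), e.g. 0F 0F C1 9E disassembles as 'pfadd mm0, mm1'.  The
 * suffix space is sparse, hence the switch above; all of the suffix
 * handlers are still stubs at this point.
 */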
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
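
/*
 * Example (illustrative): on CPUs reporting fMovCr8In32Bit (AMD's
 * alternative CR8 encoding), F0 0F 20 C0, i.e. a LOCK-prefixed
 * 'mov eax, cr0', decodes as 'mov eax, cr8', which is what the
 * iCrReg |= 8 above models.
 */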
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
1696FNIEMOP_STUB(iemOp_rdpmc);
1697/** Opcode 0x0f 0x34. */
1698FNIEMOP_STUB(iemOp_sysenter);
1699/** Opcode 0x0f 0x35. */
1700FNIEMOP_STUB(iemOp_sysexit);
1701/** Opcode 0x0f 0x37. */
1702FNIEMOP_STUB(iemOp_getsec);
1703/** Opcode 0x0f 0x38. */
1704FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1705/** Opcode 0x0f 0x3a. */
1706FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1707
1708
1709/**
1710 * Implements a conditional move.
1711 *
1712 * Wish there was an obvious way to do this where we could share and reduce
1713 * code bloat.
1714 *
1715 * @param a_Cnd The conditional "microcode" operation.
1716 */
1717#define CMOV_X(a_Cnd) \
1718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1720 { \
1721 switch (pVCpu->iem.s.enmEffOpSize) \
1722 { \
1723 case IEMMODE_16BIT: \
1724 IEM_MC_BEGIN(0, 1); \
1725 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1726 a_Cnd { \
1727 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1729 } IEM_MC_ENDIF(); \
1730 IEM_MC_ADVANCE_RIP(); \
1731 IEM_MC_END(); \
1732 return VINF_SUCCESS; \
1733 \
1734 case IEMMODE_32BIT: \
1735 IEM_MC_BEGIN(0, 1); \
1736 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1737 a_Cnd { \
1738 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1739 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1740 } IEM_MC_ELSE() { \
1741 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_64BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1753 } IEM_MC_ENDIF(); \
1754 IEM_MC_ADVANCE_RIP(); \
1755 IEM_MC_END(); \
1756 return VINF_SUCCESS; \
1757 \
1758 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1759 } \
1760 } \
1761 else \
1762 { \
1763 switch (pVCpu->iem.s.enmEffOpSize) \
1764 { \
1765 case IEMMODE_16BIT: \
1766 IEM_MC_BEGIN(0, 2); \
1767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1768 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1771 a_Cnd { \
1772 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1773 } IEM_MC_ENDIF(); \
1774 IEM_MC_ADVANCE_RIP(); \
1775 IEM_MC_END(); \
1776 return VINF_SUCCESS; \
1777 \
1778 case IEMMODE_32BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1786 } IEM_MC_ELSE() { \
1787 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1788 } IEM_MC_ENDIF(); \
1789 IEM_MC_ADVANCE_RIP(); \
1790 IEM_MC_END(); \
1791 return VINF_SUCCESS; \
1792 \
1793 case IEMMODE_64BIT: \
1794 IEM_MC_BEGIN(0, 2); \
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1796 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1798 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1799 a_Cnd { \
1800 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1807 } \
1808 } do {} while (0)
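
/* Note: in 64-bit mode a CMOVcc with a 32-bit operand size always zero
   extends the full 64-bit destination register, even when the condition is
   false.  That is why only the IEMMODE_32BIT cases above carry an
   IEM_MC_ELSE() branch clearing the high half; the 16-bit and 64-bit cases
   need none. */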
1809
1810
1811
1812/** Opcode 0x0f 0x40. */
1813FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1817}
1818
1819
1820/** Opcode 0x0f 0x41. */
1821FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1825}
1826
1827
1828/** Opcode 0x0f 0x42. */
1829FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1833}
1834
1835
1836/** Opcode 0x0f 0x43. */
1837FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1841}
1842
1843
1844/** Opcode 0x0f 0x44. */
1845FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1849}
1850
1851
1852/** Opcode 0x0f 0x45. */
1853FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1857}
1858
1859
1860/** Opcode 0x0f 0x46. */
1861FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1865}
1866
1867
1868/** Opcode 0x0f 0x47. */
1869FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1873}
1874
1875
1876/** Opcode 0x0f 0x48. */
1877FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1881}
1882
1883
1884/** Opcode 0x0f 0x49. */
1885FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1889}
1890
1891
1892/** Opcode 0x0f 0x4a. */
1893FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1897}
1898
1899
1900/** Opcode 0x0f 0x4b. */
1901FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1905}
1906
1907
1908/** Opcode 0x0f 0x4c. */
1909FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1913}
1914
1915
1916/** Opcode 0x0f 0x4d. */
1917FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1921}
1922
1923
1924/** Opcode 0x0f 0x4e. */
1925FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1929}
1930
1931
1932/** Opcode 0x0f 0x4f. */
1933FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1937}
1938
1939#undef CMOV_X
1940
1941/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1942FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1943/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1944FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1945/* Opcode 0xf3 0x0f 0x50 - invalid */
1946/* Opcode 0xf2 0x0f 0x50 - invalid */
1947
1948/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1950/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1951FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1952/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1953FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1954/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1955FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1956
1957/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1958FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1959/* Opcode 0x66 0x0f 0x52 - invalid */
1960/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1961FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1962/* Opcode 0xf2 0x0f 0x52 - invalid */
1963
1964/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1965FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1966/* Opcode 0x66 0x0f 0x53 - invalid */
1967/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1969/* Opcode 0xf2 0x0f 0x53 - invalid */
1970
1971/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1975/* Opcode 0xf3 0x0f 0x54 - invalid */
1976/* Opcode 0xf2 0x0f 0x54 - invalid */
1977
1978/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1979FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1980/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1981FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1982/* Opcode 0xf3 0x0f 0x55 - invalid */
1983/* Opcode 0xf2 0x0f 0x55 - invalid */
1984
1985/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1986FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1987/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1988FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1989/* Opcode 0xf3 0x0f 0x56 - invalid */
1990/* Opcode 0xf2 0x0f 0x56 - invalid */
1991
1992/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1993FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1994/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1995FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1996/* Opcode 0xf3 0x0f 0x57 - invalid */
1997/* Opcode 0xf2 0x0f 0x57 - invalid */
1998
1999/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2009FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2010/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2012/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2013FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2014/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2015FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2016
2017/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2018FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2019/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2020FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2021/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2022FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2023/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2024FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2025
2026/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2027FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2028/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2029FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2030/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2031FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2032/* Opcode 0xf2 0x0f 0x5b - invalid */
2033
2034/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2060
2061/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2062FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2063/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2064FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2065/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2066FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2067/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2068FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2069
2070/**
2071 * Common worker for MMX instructions on the forms:
2072 * pxxxx mm1, mm2/mem32
2073 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
 * 128-bit memory access for SSE.
2077 *
2078 * Exceptions type 4.
2079 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint32_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2125
2126
2127/**
2128 * Common worker for SSE2 instructions on the forms:
2129 * pxxxx xmm1, xmm2/mem128
2130 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
 * 128-bit memory access for SSE.
2134 *
2135 * Exceptions type 4.
2136 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *,         pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *,                 pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
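
/* Note: "LowLow_To_Full" refers to the punpckl* pattern: the low halves of
   the destination and source operands are interleaved into the full
   destination register. */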
2186
2187
2188/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2189FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2190{
2191 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2193}
2194
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2196FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2197{
2198 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2200}
2201
2202/* Opcode 0xf3 0x0f 0x60 - invalid */
2203
2204
2205/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2206FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2207{
2208 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2210}
2211
2212/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2213FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2214{
2215 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2217}
2218
2219/* Opcode 0xf3 0x0f 0x61 - invalid */
2220
2221
2222/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2223FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2224{
2225 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2227}
2228
2229/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2230FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2231{
2232 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2234}
2235
2236/* Opcode 0xf3 0x0f 0x62 - invalid */
2237
2238
2239
2240/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2241FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2242/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2243FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2244/* Opcode 0xf3 0x0f 0x63 - invalid */
2245
2246/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2247FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2248/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2249FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2250/* Opcode 0xf3 0x0f 0x64 - invalid */
2251
2252/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2253FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2254/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2255FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2256/* Opcode 0xf3 0x0f 0x65 - invalid */
2257
2258/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2259FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2260/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2261FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2262/* Opcode 0xf3 0x0f 0x66 - invalid */
2263
2264/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2265FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2266/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2267FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2268/* Opcode 0xf3 0x0f 0x67 - invalid */
2269
2270
2271/**
2272 * Common worker for MMX instructions on the form:
2273 * pxxxx mm1, mm2/mem64
2274 *
2275 * The 2nd operand is the second half of a register, which in the memory case
2276 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2277 * where it may read the full 128 bits or only the upper 64 bits.
2278 *
2279 * Exceptions type 4.
2280 */
2281FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2282{
2283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2284 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2291 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2293 IEM_MC_BEGIN(2, 0);
2294 IEM_MC_ARG(uint64_t *, pDst, 0);
2295 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2296 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2297 IEM_MC_PREPARE_FPU_USAGE();
2298 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2299 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2300 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(2, 2);
2310 IEM_MC_ARG(uint64_t *, pDst, 0);
2311 IEM_MC_LOCAL(uint64_t, uSrc);
2312 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314
2315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2318 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319
2320 IEM_MC_PREPARE_FPU_USAGE();
2321 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2322 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
2330
2331/**
2332 * Common worker for SSE2 instructions on the form:
2333 * pxxxx xmm1, xmm2/mem128
2334 *
2335 * The 2nd operand is the second half of a register, which in the memory case
2336 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2337 * where it may read the full 128 bits or only the upper 64 bits.
2338 *
2339 * Exceptions type 4.
2340 */
2341FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2342{
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2345 {
2346 /*
2347 * Register, register.
2348 */
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_BEGIN(2, 0);
2351 IEM_MC_ARG(uint128_t *, pDst, 0);
2352 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2354 IEM_MC_PREPARE_SSE_USAGE();
2355 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2356 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2357 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2358 IEM_MC_ADVANCE_RIP();
2359 IEM_MC_END();
2360 }
2361 else
2362 {
2363 /*
2364 * Register, memory.
2365 */
2366 IEM_MC_BEGIN(2, 2);
2367 IEM_MC_ARG(uint128_t *, pDst, 0);
2368 IEM_MC_LOCAL(uint128_t, uSrc);
2369 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2376
2377 IEM_MC_PREPARE_SSE_USAGE();
2378 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2379 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2380
2381 IEM_MC_ADVANCE_RIP();
2382 IEM_MC_END();
2383 }
2384 return VINF_SUCCESS;
2385}
2386
2387
2388/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2389FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2390{
2391 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2392 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2393}
2394
2395/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2396FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2397{
2398 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2399 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2400}
2401/* Opcode 0xf3 0x0f 0x68 - invalid */
2402
2403
2404/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2405FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2406{
2407 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2409}
2410
2411/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2412FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2413{
2414 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2418/* Opcode 0xf3 0x0f 0x69 - invalid */
2419
2420
2421/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2422FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2423{
2424 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2426}
2427
2428/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2429FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2430{
2431 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2432 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2433}
2434/* Opcode 0xf3 0x0f 0x6a - invalid */
2435
2436
2437/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2438FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2439/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2440FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2441/* Opcode 0xf3 0x0f 0x6b - invalid */
2442
2443
2444/* Opcode 0x0f 0x6c - invalid */
2445
2446/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2447FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2448{
2449 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2450 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2451}
2452
2453/* Opcode 0xf3 0x0f 0x6c - invalid */
2454/* Opcode 0xf2 0x0f 0x6c - invalid */
2455
2456
2457/* Opcode 0x0f 0x6d - invalid */
2458
2459/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2460FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2461{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2463 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2464}
2465
2466/* Opcode 0xf3 0x0f 0x6d - invalid */
2467
2468
2469/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2470FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2474 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2475 else
2476 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* MMX, greg */
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_BEGIN(0, 1);
2482 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2484 IEM_MC_LOCAL(uint64_t, u64Tmp);
2485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2486 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2487 else
2488 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /* MMX, [mem] */
2496 IEM_MC_BEGIN(0, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2503 {
2504 IEM_MC_LOCAL(uint64_t, u64Tmp);
2505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2507 }
2508 else
2509 {
2510 IEM_MC_LOCAL(uint32_t, u32Tmp);
2511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2512 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2513 }
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
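
/* Note: without REX.W the 32-bit operand is zero extended into the 64-bit
   MMX register, see the _ZX_U64 fetch/store variants above. */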
2519
2520/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2521FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2522{
2523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2524 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2526 else
        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2529 {
2530 /* XMM, greg*/
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_BEGIN(0, 1);
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2536 {
2537 IEM_MC_LOCAL(uint64_t, u64Tmp);
2538 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2539 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2540 }
2541 else
2542 {
2543 IEM_MC_LOCAL(uint32_t, u32Tmp);
2544 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2546 }
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /* XMM, [mem] */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2560 {
2561 IEM_MC_LOCAL(uint64_t, u64Tmp);
2562 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564 }
2565 else
2566 {
2567 IEM_MC_LOCAL(uint32_t, u32Tmp);
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2570 }
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577/* Opcode 0xf3 0x0f 0x6e - invalid */
2578
2579
2580/** Opcode 0x0f 0x6f - movq Pq, Qq */
2581FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * Register, register.
2589 */
2590 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2591 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2593 IEM_MC_BEGIN(0, 1);
2594 IEM_MC_LOCAL(uint64_t, u64Tmp);
2595 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2597 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2598 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2599 IEM_MC_ADVANCE_RIP();
2600 IEM_MC_END();
2601 }
2602 else
2603 {
2604 /*
2605 * Register, memory.
2606 */
2607 IEM_MC_BEGIN(0, 2);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2610
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2615 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2617
2618 IEM_MC_ADVANCE_RIP();
2619 IEM_MC_END();
2620 }
2621 return VINF_SUCCESS;
2622}
2623
2624/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2625FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2626{
2627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(vmovdqa_Vdq_Wdq, "vmovdqa Vx,Wx");
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2639 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(0, 2);
2649 IEM_MC_LOCAL(uint128_t, u128Tmp);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2656 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
2665/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2666FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2667{
2668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(vmovdqu_Vdq_Wdq, "vmovdqu Vx,Wx");
2670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(0, 0);
2677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 else
2685 {
2686 /*
2687 * Register, memory.
2688 */
2689 IEM_MC_BEGIN(0, 2);
2690 IEM_MC_LOCAL(uint128_t, u128Tmp);
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2697 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2698 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2699
2700 IEM_MC_ADVANCE_RIP();
2701 IEM_MC_END();
2702 }
2703 return VINF_SUCCESS;
2704}
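
/* Note: the only difference between the movdqa and movdqu workers above is
   the memory fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces 16-byte
   alignment (raising \#GP(0) on a misaligned operand), while
   IEM_MC_FETCH_MEM_U128 accepts any alignment. */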
2705
2706
2707/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2708FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2709{
2710 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2713 {
2714 /*
2715 * Register, register.
2716 */
2717 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2719
2720 IEM_MC_BEGIN(3, 0);
2721 IEM_MC_ARG(uint64_t *, pDst, 0);
2722 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2723 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2725 IEM_MC_PREPARE_FPU_USAGE();
2726 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2727 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2728 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2729 IEM_MC_ADVANCE_RIP();
2730 IEM_MC_END();
2731 }
2732 else
2733 {
2734 /*
2735 * Register, memory.
2736 */
2737 IEM_MC_BEGIN(3, 2);
2738 IEM_MC_ARG(uint64_t *, pDst, 0);
2739 IEM_MC_LOCAL(uint64_t, uSrc);
2740 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2742
2743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
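        /* The imm8 follows the ModR/M displacement bytes, so it can only be
           fetched once the effective address calculation above has consumed
           them. */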
2744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2748
2749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2750 IEM_MC_PREPARE_FPU_USAGE();
2751 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2752 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2753
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 return VINF_SUCCESS;
2758}
2759
2760/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2761FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2762{
2763 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2766 {
2767 /*
2768 * Register, register.
2769 */
2770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772
2773 IEM_MC_BEGIN(3, 0);
2774 IEM_MC_ARG(uint128_t *, pDst, 0);
2775 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_PREPARE_SSE_USAGE();
2779 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2780 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2781 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2782 IEM_MC_ADVANCE_RIP();
2783 IEM_MC_END();
2784 }
2785 else
2786 {
2787 /*
2788 * Register, memory.
2789 */
2790 IEM_MC_BEGIN(3, 2);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_LOCAL(uint128_t, uSrc);
2793 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2795
2796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801
2802 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2806
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 return VINF_SUCCESS;
2811}
2812
2813/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2814FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2815{
2816 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2819 {
2820 /*
2821 * Register, register.
2822 */
2823 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2825
2826 IEM_MC_BEGIN(3, 0);
2827 IEM_MC_ARG(uint128_t *, pDst, 0);
2828 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2831 IEM_MC_PREPARE_SSE_USAGE();
2832 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2833 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2835 IEM_MC_ADVANCE_RIP();
2836 IEM_MC_END();
2837 }
2838 else
2839 {
2840 /*
2841 * Register, memory.
2842 */
2843 IEM_MC_BEGIN(3, 2);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_LOCAL(uint128_t, uSrc);
2846 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2848
2849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2854
2855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2856 IEM_MC_PREPARE_SSE_USAGE();
2857 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2859
2860 IEM_MC_ADVANCE_RIP();
2861 IEM_MC_END();
2862 }
2863 return VINF_SUCCESS;
2864}
2865
2866/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2867FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2868{
2869 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /*
2874 * Register, register.
2875 */
2876 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878
2879 IEM_MC_BEGIN(3, 0);
2880 IEM_MC_ARG(uint128_t *, pDst, 0);
2881 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2884 IEM_MC_PREPARE_SSE_USAGE();
2885 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2886 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2887 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2888 IEM_MC_ADVANCE_RIP();
2889 IEM_MC_END();
2890 }
2891 else
2892 {
2893 /*
2894 * Register, memory.
2895 */
2896 IEM_MC_BEGIN(3, 2);
2897 IEM_MC_ARG(uint128_t *, pDst, 0);
2898 IEM_MC_LOCAL(uint128_t, uSrc);
2899 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2901
2902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2907
2908 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2909 IEM_MC_PREPARE_SSE_USAGE();
2910 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2911 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
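
/* Note: the three prefixed 0x0f 0x70 variants above share an identical
   decode shell and differ only in the iemAImpl_pshuf* worker invoked. */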
2918
2919
2920/** Opcode 0x0f 0x71 11/2. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x66 0x0f 0x71 11/2. */
2924FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x0f 0x71 11/4. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2928
2929/** Opcode 0x66 0x0f 0x71 11/4. */
2930FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2931
2932/** Opcode 0x0f 0x71 11/6. */
2933FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2934
2935/** Opcode 0x66 0x0f 0x71 11/6. */
2936FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2937
2938
2939/**
2940 * Group 12 jump table for register variant.
2941 */
2942IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[8*4] =
2943{
2944 /** @todo decode imm8? */
2945 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
2946 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
2947 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2948 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
2949 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2950 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
2951 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2952 /* /7 */ IEMOP_X4(iemOp_InvalidWithRM)
2953};
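
/* The dispatcher below indexes this table as reg * 4 + idxPrefix, where
   idxPrefix is assumed to encode the active SIMD prefix as 0 = none,
   1 = 0x66, 2 = 0xF3, 3 = 0xF2 (matching the four-entry rows above). */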
2954
2955
2956/** Opcode 0x0f 0x71. */
2957FNIEMOP_DEF(iemOp_Grp12)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2961 /* register, register */
2962 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2963 + pVCpu->iem.s.idxPrefix], bRm);
2964 /** @todo decode SIB, disp, Ib? */
2965 return IEMOP_RAISE_INVALID_OPCODE();
2966}
2967
2968
2969/** Opcode 0x0f 0x72 11/2. */
2970FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2971
2972/** Opcode 0x66 0x0f 0x72 11/2. */
2973FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2974
2975/** Opcode 0x0f 0x72 11/4. */
2976FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2977
2978/** Opcode 0x66 0x0f 0x72 11/4. */
2979FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2980
2981/** Opcode 0x0f 0x72 11/6. */
2982FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x72 11/6. */
2985FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2986
2987
2988/**
2989 * Group 13 jump table for register variant.
2990 */
2991IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[8*4] =
2992{
2993 /** @todo decode imm8? */
2994 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
2995 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
2996 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2997 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
2998 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2999 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
3000 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3001 /* /7 */ IEMOP_X4(iemOp_InvalidWithRM)
3002};
3003
3004/** Opcode 0x0f 0x72. */
3005FNIEMOP_DEF(iemOp_Grp13)
3006{
3007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3009 /* register, register */
3010 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3011 + pVCpu->iem.s.idxPrefix], bRm);
3012 /** @todo decode SIB, disp, Ib? */
3013 return IEMOP_RAISE_INVALID_OPCODE();
3014}
3015
3016
3017/** Opcode 0x0f 0x73 11/2. */
3018FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3019
3020/** Opcode 0x66 0x0f 0x73 11/2. */
3021FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3022
3023/** Opcode 0x66 0x0f 0x73 11/3. */
3024FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3025
3026/** Opcode 0x0f 0x73 11/6. */
3027FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3028
3029/** Opcode 0x66 0x0f 0x73 11/6. */
3030FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3031
3032/** Opcode 0x66 0x0f 0x73 11/7. */
3033FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3034
3035
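/* Note: unlike groups 12 and 13 above, group 14 still decodes the prefix by
   hand; the /3 (psrldq) and /7 (pslldq) encodings exist only with the 0x66
   prefix, while /2 and /6 have both MMX and SSE2 forms. */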
3036/** Opcode 0x0f 0x73. */
3037FNIEMOP_DEF(iemOp_Grp14)
3038{
3039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3040 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3041 return IEMOP_RAISE_INVALID_OPCODE();
3042 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3043 {
3044 case 0: case 1: case 4: case 5:
3045 return IEMOP_RAISE_INVALID_OPCODE();
3046 case 2:
3047 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3048 {
3049 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3050 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3051 default: return IEMOP_RAISE_INVALID_OPCODE();
3052 }
3053 case 3:
3054 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3055 {
3056 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3057 default: return IEMOP_RAISE_INVALID_OPCODE();
3058 }
3059 case 6:
3060 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3061 {
3062 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3063 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3064 default: return IEMOP_RAISE_INVALID_OPCODE();
3065 }
3066 case 7:
3067 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3068 {
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3073 }
3074}
3075
3076
3077/**
3078 * Common worker for MMX instructions on the form:
3079 * pxxx mm1, mm2/mem64
3080 */
3081FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3082{
3083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3085 {
3086 /*
3087 * Register, register.
3088 */
3089 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3090 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3092 IEM_MC_BEGIN(2, 0);
3093 IEM_MC_ARG(uint64_t *, pDst, 0);
3094 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3095 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3096 IEM_MC_PREPARE_FPU_USAGE();
3097 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3098 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3099 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3100 IEM_MC_ADVANCE_RIP();
3101 IEM_MC_END();
3102 }
3103 else
3104 {
3105 /*
3106 * Register, memory.
3107 */
3108 IEM_MC_BEGIN(2, 2);
3109 IEM_MC_ARG(uint64_t *, pDst, 0);
3110 IEM_MC_LOCAL(uint64_t, uSrc);
3111 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3113
3114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3116 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3117 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3118
3119 IEM_MC_PREPARE_FPU_USAGE();
3120 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3121 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3122
3123 IEM_MC_ADVANCE_RIP();
3124 IEM_MC_END();
3125 }
3126 return VINF_SUCCESS;
3127}
3128
3129
3130/**
3131 * Common worker for SSE2 instructions on the forms:
3132 * pxxx xmm1, xmm2/mem128
3133 *
3134 * Proper alignment of the 128-bit operand is enforced.
3135 * Exceptions type 4. SSE2 cpuid checks.
3136 */
3137FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3138{
3139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3141 {
3142 /*
3143 * Register, register.
3144 */
3145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3146 IEM_MC_BEGIN(2, 0);
3147 IEM_MC_ARG(uint128_t *, pDst, 0);
3148 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3149 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3150 IEM_MC_PREPARE_SSE_USAGE();
3151 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3152 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3153 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3154 IEM_MC_ADVANCE_RIP();
3155 IEM_MC_END();
3156 }
3157 else
3158 {
3159 /*
3160 * Register, memory.
3161 */
3162 IEM_MC_BEGIN(2, 2);
3163 IEM_MC_ARG(uint128_t *, pDst, 0);
3164 IEM_MC_LOCAL(uint128_t, uSrc);
3165 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3167
3168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3171 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3172
3173 IEM_MC_PREPARE_SSE_USAGE();
3174 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3175 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3176
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 return VINF_SUCCESS;
3181}
3182
3183
3184/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3185FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3186{
3187 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3188 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3189}
3190
3191/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3192FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3193{
3194 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3195 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3196}
3197
3198/* Opcode 0xf3 0x0f 0x74 - invalid */
3199/* Opcode 0xf2 0x0f 0x74 - invalid */
3200
3201
3202/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3203FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3204{
3205 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3206 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3207}
3208
3209/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3210FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3211{
3212 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3213 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3214}
3215
3216/* Opcode 0xf3 0x0f 0x75 - invalid */
3217/* Opcode 0xf2 0x0f 0x75 - invalid */
3218
3219
3220/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3221FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3222{
3223 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3224 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3225}
3226
3227/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3228FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3229{
3230 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3231 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3232}
3233
3234/* Opcode 0xf3 0x0f 0x76 - invalid */
3235/* Opcode 0xf2 0x0f 0x76 - invalid */
3236
3237
3238/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3239FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3240/* Opcode 0x66 0x0f 0x77 - invalid */
3241/* Opcode 0xf3 0x0f 0x77 - invalid */
3242/* Opcode 0xf2 0x0f 0x77 - invalid */
3243
3244/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3245FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3246/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3247FNIEMOP_STUB(iemOp_AmdGrp17);
3248/* Opcode 0xf3 0x0f 0x78 - invalid */
3249/* Opcode 0xf2 0x0f 0x78 - invalid */
3250
3251/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3252FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3253/* Opcode 0x66 0x0f 0x79 - invalid */
3254/* Opcode 0xf3 0x0f 0x79 - invalid */
3255/* Opcode 0xf2 0x0f 0x79 - invalid */
3256
3257/* Opcode 0x0f 0x7a - invalid */
3258/* Opcode 0x66 0x0f 0x7a - invalid */
3259/* Opcode 0xf3 0x0f 0x7a - invalid */
3260/* Opcode 0xf2 0x0f 0x7a - invalid */
3261
3262/* Opcode 0x0f 0x7b - invalid */
3263/* Opcode 0x66 0x0f 0x7b - invalid */
3264/* Opcode 0xf3 0x0f 0x7b - invalid */
3265/* Opcode 0xf2 0x0f 0x7b - invalid */
3266
3267/* Opcode 0x0f 0x7c - invalid */
3268/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3269FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3270/* Opcode 0xf3 0x0f 0x7c - invalid */
3271/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3272FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3273
3274/* Opcode 0x0f 0x7d - invalid */
3275/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3276FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3277/* Opcode 0xf3 0x0f 0x7d - invalid */
3278/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3279FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3280
3281
3282/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3283FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3284{
3285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3286 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3287 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3288 else
3289 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3291 {
3292 /* greg, MMX */
3293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3294 IEM_MC_BEGIN(0, 1);
3295 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3296 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3298 {
3299 IEM_MC_LOCAL(uint64_t, u64Tmp);
3300 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3301 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3302 }
3303 else
3304 {
3305 IEM_MC_LOCAL(uint32_t, u32Tmp);
3306 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3307 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3308 }
3309 IEM_MC_ADVANCE_RIP();
3310 IEM_MC_END();
3311 }
3312 else
3313 {
3314 /* [mem], MMX */
3315 IEM_MC_BEGIN(0, 2);
3316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3320 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3321 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3322 {
3323 IEM_MC_LOCAL(uint64_t, u64Tmp);
3324 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3325 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3326 }
3327 else
3328 {
3329 IEM_MC_LOCAL(uint32_t, u32Tmp);
3330 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3331 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3332 }
3333 IEM_MC_ADVANCE_RIP();
3334 IEM_MC_END();
3335 }
3336 return VINF_SUCCESS;
3337}
3338
3339/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3340FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3341{
3342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3343 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
3345 else
        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3348 {
3349 /* greg, XMM */
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 IEM_MC_BEGIN(0, 1);
3352 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3353 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 {
3356 IEM_MC_LOCAL(uint64_t, u64Tmp);
3357 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3358 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3359 }
3360 else
3361 {
3362 IEM_MC_LOCAL(uint32_t, u32Tmp);
3363 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3364 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3365 }
3366 IEM_MC_ADVANCE_RIP();
3367 IEM_MC_END();
3368 }
3369 else
3370 {
3371 /* [mem], XMM */
3372 IEM_MC_BEGIN(0, 2);
3373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
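    /* Note: identical to vmovdqa above except that the memory store below
       does not perform the SSE 16-byte alignment check. */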
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



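/*
 * The Jcc instructions at 0x0f 0x80 thru 0x0f 0x8f below all follow the same
 * pattern: fetch a rel16 or rel32 displacement according to the effective
 * operand size, test the relevant EFLAGS condition, and either take the
 * relative jump or just advance RIP past the instruction.  In 64-bit mode
 * the operand size defaults to 64 bits (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE),
 * so the 32-bit displacement form is used and sign-extended.
 */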
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


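/*
 * The SETcc instructions at 0x0f 0x90 thru 0x0f 0x9f below likewise share
 * one pattern: evaluate the EFLAGS condition and store a constant 1 or 0 to
 * the byte-sized register or memory operand selected by the mod r/m byte.
 */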
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
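    /* Pushing ES, CS, SS or DS is invalid in 64-bit mode, so only the FS and
       GS users of this helper remain valid there. */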
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
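            /* Assumption: the dedicated SREG push is used because CPUs are
               reported to write only the low word of the 32-bit stack slot
               when pushing a segment register; a plain IEM_MC_PUSH_U32
               would write all 32 bits. */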
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}


/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
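    /* These instructions only define CF (it receives the selected bit); the
       flags listed above are architecturally undefined, so verification does
       not compare them. */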

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
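        /* With a register destination the bit offset is taken modulo the
           operand width, hence the 0xf/0x1f/0x3f masking below. */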
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
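        /* The bit offset is split into an element index (arithmetic shift
           right by 4/5/6) and a bit-within-element part (the 0xf/0x1f/0x3f
           mask), and the index is scaled to a byte displacement on the
           effective address.  E.g. a dword-sized BT with bit offset 100
           accesses the dword at +((100 >> 5) << 2) = +12 bytes and tests
           bit 100 & 0x1f = 4 in it; the arithmetic shift preserves the
           sign, so negative offsets address memory below the operand. */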
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

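                /* Note: the final '1' tells the effective address calculation
                   that one immediate byte (the shift count) still follows the
                   mod r/m bytes, which matters for RIP-relative operands. */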
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
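    /* The shift count is read from CL below; masking it to the valid range
       is left to the pfnNormalU* implementation workers. */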

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5401
5402
5403/** Opcode 0x0f 0xad. */
5404FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5405{
5406 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5407 IEMOP_HLP_MIN_386();
5408 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5409}
5410
5411
5412/** Opcode 0x0f 0xae mem/0. */
5413FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5414{
5415 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5417 return IEMOP_RAISE_INVALID_OPCODE();
5418
5419 IEM_MC_BEGIN(3, 1);
5420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5422 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
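     /* The effective operand size is forwarded so the C implementation can
        pick the REX.W form (FXSAVE64, 64-bit FPU IP/DP image) when needed. */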
5423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5426 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
5431
5432/** Opcode 0x0f 0xae mem/1. */
5433FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5434{
5435 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5436 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5437 return IEMOP_RAISE_INVALID_OPCODE();
5438
5439 IEM_MC_BEGIN(3, 1);
5440 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5441 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5442 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5446 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449}
5450
5451
5452/** Opcode 0x0f 0xae mem/2. */
5453FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5454
5455/** Opcode 0x0f 0xae mem/3. */
5456FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5457
5458/** Opcode 0x0f 0xae mem/4. */
5459FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5460
5461/** Opcode 0x0f 0xae mem/5. */
5462FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5463
5464/** Opcode 0x0f 0xae mem/6. */
5465FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5466
5467/** Opcode 0x0f 0xae mem/7. */
5468FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5469
5470
5471/** Opcode 0x0f 0xae 11b/5. */
5472FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5473{
5474 RT_NOREF_PV(bRm);
5475 IEMOP_MNEMONIC(lfence, "lfence");
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5478 return IEMOP_RAISE_INVALID_OPCODE();
5479
5480 IEM_MC_BEGIN(0, 0);
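     /* Use the host's native LFENCE when it has SSE2, otherwise fall back to
        an alternative memory fence; mfence/sfence below follow the same pattern. */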
5481 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5483 else
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0xae 11b/6. */
5492FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5493{
5494 RT_NOREF_PV(bRm);
5495 IEMOP_MNEMONIC(mfence, "mfence");
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5498 return IEMOP_RAISE_INVALID_OPCODE();
5499
5500 IEM_MC_BEGIN(0, 0);
5501 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5502 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5503 else
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 return VINF_SUCCESS;
5508}
5509
5510
5511/** Opcode 0x0f 0xae 11b/7. */
5512FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5513{
5514 RT_NOREF_PV(bRm);
5515 IEMOP_MNEMONIC(sfence, "sfence");
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5518 return IEMOP_RAISE_INVALID_OPCODE();
5519
5520 IEM_MC_BEGIN(0, 0);
5521 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5522 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5523 else
5524 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5525 IEM_MC_ADVANCE_RIP();
5526 IEM_MC_END();
5527 return VINF_SUCCESS;
5528}
5529
5530
5531/** Opcode 0xf3 0x0f 0xae 11b/0. */
5532FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5533
5534/** Opcode 0xf3 0x0f 0xae 11b/1. */
5535FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5536
5537/** Opcode 0xf3 0x0f 0xae 11b/2. */
5538FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5539
5540/** Opcode 0xf3 0x0f 0xae 11b/3. */
5541FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5542
5543
5544/** Opcode 0x0f 0xae. */
5545FNIEMOP_DEF(iemOp_Grp15)
5546{
5547 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
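     /* Memory forms encode fxsave..clflush via the reg field; for register
        forms the mandatory prefix selects the table (none: the fences,
        F3: the fs/gs base accessors). */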
5549 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5550 {
5551 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5552 {
5553 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5554 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5555 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5556 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5557 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5558 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5559 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5560 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5562 }
5563 }
5564 else
5565 {
5566 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5567 {
5568 case 0:
5569 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5570 {
5571 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5572 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5574 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5575 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5576 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5577 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5578 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5580 }
5581 break;
5582
5583 case IEM_OP_PRF_REPZ:
5584 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5585 {
5586 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5587 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5588 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5589 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5590 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5591 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5592 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5593 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5595 }
5596 break;
5597
5598 default:
5599 return IEMOP_RAISE_INVALID_OPCODE();
5600 }
5601 }
5602}
5603
5604
5605/** Opcode 0x0f 0xaf. */
5606FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5607{
5608 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5609 IEMOP_HLP_MIN_386();
5610 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5611 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5612}
5613
5614
5615/** Opcode 0x0f 0xb0. */
5616FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5617{
5618 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5619 IEMOP_HLP_MIN_486();
5620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5621
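     /* CMPXCHG accepts a LOCK prefix, so decoding is completed without the
        no-lock check and the locked helper variant is used when it is present. */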
5622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5623 {
5624 IEMOP_HLP_DONE_DECODING();
5625 IEM_MC_BEGIN(4, 0);
5626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5627 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5628 IEM_MC_ARG(uint8_t, u8Src, 2);
5629 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5630
5631 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5632 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5633 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5634 IEM_MC_REF_EFLAGS(pEFlags);
5635 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5636 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5637 else
5638 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5639
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 }
5643 else
5644 {
5645 IEM_MC_BEGIN(4, 3);
5646 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5647 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5648 IEM_MC_ARG(uint8_t, u8Src, 2);
5649 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5651 IEM_MC_LOCAL(uint8_t, u8Al);
5652
5653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5654 IEMOP_HLP_DONE_DECODING();
5655 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5656 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5657 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5658 IEM_MC_FETCH_EFLAGS(EFlags);
5659 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5660 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5661 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5662 else
5663 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5664
5665 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5666 IEM_MC_COMMIT_EFLAGS(EFlags);
5667 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5668 IEM_MC_ADVANCE_RIP();
5669 IEM_MC_END();
5670 }
5671 return VINF_SUCCESS;
5672}
5673
5674/** Opcode 0x0f 0xb1. */
5675FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5676{
5677 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5678 IEMOP_HLP_MIN_486();
5679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5680
5681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5682 {
5683 IEMOP_HLP_DONE_DECODING();
5684 switch (pVCpu->iem.s.enmEffOpSize)
5685 {
5686 case IEMMODE_16BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5689 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5690 IEM_MC_ARG(uint16_t, u16Src, 2);
5691 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5692
5693 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5694 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5695 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5698 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5699 else
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5701
5702 IEM_MC_ADVANCE_RIP();
5703 IEM_MC_END();
5704 return VINF_SUCCESS;
5705
5706 case IEMMODE_32BIT:
5707 IEM_MC_BEGIN(4, 0);
5708 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5709 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5710 IEM_MC_ARG(uint32_t, u32Src, 2);
5711 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5712
5713 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5714 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5715 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5716 IEM_MC_REF_EFLAGS(pEFlags);
5717 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5718 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5719 else
5720 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5721
5722 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5723 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5724 IEM_MC_ADVANCE_RIP();
5725 IEM_MC_END();
5726 return VINF_SUCCESS;
5727
5728 case IEMMODE_64BIT:
5729 IEM_MC_BEGIN(4, 0);
5730 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5731 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
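     /* On 32-bit hosts the 64-bit source is passed by reference, presumably
        to match the assembly helper's calling convention there. */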
5732#ifdef RT_ARCH_X86
5733 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5734#else
5735 IEM_MC_ARG(uint64_t, u64Src, 2);
5736#endif
5737 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5738
5739 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5740 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5741 IEM_MC_REF_EFLAGS(pEFlags);
5742#ifdef RT_ARCH_X86
5743 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5744 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5745 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5746 else
5747 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5748#else
5749 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5750 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5751 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5752 else
5753 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5754#endif
5755
5756 IEM_MC_ADVANCE_RIP();
5757 IEM_MC_END();
5758 return VINF_SUCCESS;
5759
5760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5761 }
5762 }
5763 else
5764 {
5765 switch (pVCpu->iem.s.enmEffOpSize)
5766 {
5767 case IEMMODE_16BIT:
5768 IEM_MC_BEGIN(4, 3);
5769 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5770 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5771 IEM_MC_ARG(uint16_t, u16Src, 2);
5772 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5774 IEM_MC_LOCAL(uint16_t, u16Ax);
5775
5776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5777 IEMOP_HLP_DONE_DECODING();
5778 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5779 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5780 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5781 IEM_MC_FETCH_EFLAGS(EFlags);
5782 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5783 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5784 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5785 else
5786 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5787
5788 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5789 IEM_MC_COMMIT_EFLAGS(EFlags);
5790 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5791 IEM_MC_ADVANCE_RIP();
5792 IEM_MC_END();
5793 return VINF_SUCCESS;
5794
5795 case IEMMODE_32BIT:
5796 IEM_MC_BEGIN(4, 3);
5797 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5798 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5799 IEM_MC_ARG(uint32_t, u32Src, 2);
5800 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5802 IEM_MC_LOCAL(uint32_t, u32Eax);
5803
5804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5805 IEMOP_HLP_DONE_DECODING();
5806 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5807 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5808 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5809 IEM_MC_FETCH_EFLAGS(EFlags);
5810 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5811 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5812 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5813 else
5814 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5815
5816 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5817 IEM_MC_COMMIT_EFLAGS(EFlags);
5818 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5819 IEM_MC_ADVANCE_RIP();
5820 IEM_MC_END();
5821 return VINF_SUCCESS;
5822
5823 case IEMMODE_64BIT:
5824 IEM_MC_BEGIN(4, 3);
5825 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5826 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5827#ifdef RT_ARCH_X86
5828 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5829#else
5830 IEM_MC_ARG(uint64_t, u64Src, 2);
5831#endif
5832 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5834 IEM_MC_LOCAL(uint64_t, u64Rax);
5835
5836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5837 IEMOP_HLP_DONE_DECODING();
5838 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5839 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5840 IEM_MC_FETCH_EFLAGS(EFlags);
5841 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5842#ifdef RT_ARCH_X86
5843 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5844 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5845 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5846 else
5847 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5848#else
5849 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5850 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5851 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5852 else
5853 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5854#endif
5855
5856 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5857 IEM_MC_COMMIT_EFLAGS(EFlags);
5858 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5859 IEM_MC_ADVANCE_RIP();
5860 IEM_MC_END();
5861 return VINF_SUCCESS;
5862
5863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5864 }
5865 }
5866}
5867
5868
5869FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5870{
5871 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5872 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5873
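     /* Far pointer layout in memory: the offset comes first, with the 16-bit
        selector immediately after it (at +2, +4 or +8 by operand size). */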
5874 switch (pVCpu->iem.s.enmEffOpSize)
5875 {
5876 case IEMMODE_16BIT:
5877 IEM_MC_BEGIN(5, 1);
5878 IEM_MC_ARG(uint16_t, uSel, 0);
5879 IEM_MC_ARG(uint16_t, offSeg, 1);
5880 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5881 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5882 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5883 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5887 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5888 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5889 IEM_MC_END();
5890 return VINF_SUCCESS;
5891
5892 case IEMMODE_32BIT:
5893 IEM_MC_BEGIN(5, 1);
5894 IEM_MC_ARG(uint16_t, uSel, 0);
5895 IEM_MC_ARG(uint32_t, offSeg, 1);
5896 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5897 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5898 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5899 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5903 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5904 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5905 IEM_MC_END();
5906 return VINF_SUCCESS;
5907
5908 case IEMMODE_64BIT:
5909 IEM_MC_BEGIN(5, 1);
5910 IEM_MC_ARG(uint16_t, uSel, 0);
5911 IEM_MC_ARG(uint64_t, offSeg, 1);
5912 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5913 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5914 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5915 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5918 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5919 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5920 else
5921 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5922 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5923 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5924 IEM_MC_END();
5925 return VINF_SUCCESS;
5926
5927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5928 }
5929}
5930
5931
5932/** Opcode 0x0f 0xb2. */
5933FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5934{
5935 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5936 IEMOP_HLP_MIN_386();
5937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5939 return IEMOP_RAISE_INVALID_OPCODE();
5940 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5941}
5942
5943
5944/** Opcode 0x0f 0xb3. */
5945FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5946{
5947 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5948 IEMOP_HLP_MIN_386();
5949 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5950}
5951
5952
5953/** Opcode 0x0f 0xb4. */
5954FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5955{
5956 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5957 IEMOP_HLP_MIN_386();
5958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5960 return IEMOP_RAISE_INVALID_OPCODE();
5961 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5962}
5963
5964
5965/** Opcode 0x0f 0xb5. */
5966FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5967{
5968 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5969 IEMOP_HLP_MIN_386();
5970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5972 return IEMOP_RAISE_INVALID_OPCODE();
5973 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5974}
5975
5976
5977/** Opcode 0x0f 0xb6. */
5978FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5979{
5980 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5981 IEMOP_HLP_MIN_386();
5982
5983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5984
5985 /*
5986 * If rm is denoting a register, no more instruction bytes.
5987 */
5988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5989 {
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 switch (pVCpu->iem.s.enmEffOpSize)
5992 {
5993 case IEMMODE_16BIT:
5994 IEM_MC_BEGIN(0, 1);
5995 IEM_MC_LOCAL(uint16_t, u16Value);
5996 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5997 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5998 IEM_MC_ADVANCE_RIP();
5999 IEM_MC_END();
6000 return VINF_SUCCESS;
6001
6002 case IEMMODE_32BIT:
6003 IEM_MC_BEGIN(0, 1);
6004 IEM_MC_LOCAL(uint32_t, u32Value);
6005 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6006 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 return VINF_SUCCESS;
6010
6011 case IEMMODE_64BIT:
6012 IEM_MC_BEGIN(0, 1);
6013 IEM_MC_LOCAL(uint64_t, u64Value);
6014 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6015 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6016 IEM_MC_ADVANCE_RIP();
6017 IEM_MC_END();
6018 return VINF_SUCCESS;
6019
6020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6021 }
6022 }
6023 else
6024 {
6025 /*
6026 * We're loading a register from memory.
6027 */
6028 switch (pVCpu->iem.s.enmEffOpSize)
6029 {
6030 case IEMMODE_16BIT:
6031 IEM_MC_BEGIN(0, 2);
6032 IEM_MC_LOCAL(uint16_t, u16Value);
6033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6036 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6037 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6038 IEM_MC_ADVANCE_RIP();
6039 IEM_MC_END();
6040 return VINF_SUCCESS;
6041
6042 case IEMMODE_32BIT:
6043 IEM_MC_BEGIN(0, 2);
6044 IEM_MC_LOCAL(uint32_t, u32Value);
6045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6049 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6050 IEM_MC_ADVANCE_RIP();
6051 IEM_MC_END();
6052 return VINF_SUCCESS;
6053
6054 case IEMMODE_64BIT:
6055 IEM_MC_BEGIN(0, 2);
6056 IEM_MC_LOCAL(uint64_t, u64Value);
6057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6060 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6061 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6062 IEM_MC_ADVANCE_RIP();
6063 IEM_MC_END();
6064 return VINF_SUCCESS;
6065
6066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6067 }
6068 }
6069}
6070
6071
6072/** Opcode 0x0f 0xb7. */
6073FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6074{
6075 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6076 IEMOP_HLP_MIN_386();
6077
6078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6079
6080 /** @todo Not entirely sure how the operand size prefix is handled here,
6081 * assuming that it will be ignored. Would be nice to have a few
6082 * tests for this. */
6083 /*
6084 * If rm is denoting a register, no more instruction bytes.
6085 */
6086 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6087 {
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6090 {
6091 IEM_MC_BEGIN(0, 1);
6092 IEM_MC_LOCAL(uint32_t, u32Value);
6093 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6094 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6095 IEM_MC_ADVANCE_RIP();
6096 IEM_MC_END();
6097 }
6098 else
6099 {
6100 IEM_MC_BEGIN(0, 1);
6101 IEM_MC_LOCAL(uint64_t, u64Value);
6102 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6103 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 }
6107 }
6108 else
6109 {
6110 /*
6111 * We're loading a register from memory.
6112 */
6113 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6114 {
6115 IEM_MC_BEGIN(0, 2);
6116 IEM_MC_LOCAL(uint32_t, u32Value);
6117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6120 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6121 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6122 IEM_MC_ADVANCE_RIP();
6123 IEM_MC_END();
6124 }
6125 else
6126 {
6127 IEM_MC_BEGIN(0, 2);
6128 IEM_MC_LOCAL(uint64_t, u64Value);
6129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6132 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6133 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6134 IEM_MC_ADVANCE_RIP();
6135 IEM_MC_END();
6136 }
6137 }
6138 return VINF_SUCCESS;
6139}
6140
6141
6142/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6143FNIEMOP_UD_STUB(iemOp_jmpe);
6144/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6145FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6146
6147
6148/** Opcode 0x0f 0xb9. */
6149FNIEMOP_DEF(iemOp_Grp10)
6150{
6151 Log(("iemOp_Grp10 -> #UD\n"));
6152 return IEMOP_RAISE_INVALID_OPCODE();
6153}
6154
6155
6156/** Opcode 0x0f 0xba. */
6157FNIEMOP_DEF(iemOp_Grp8)
6158{
6159 IEMOP_HLP_MIN_386();
6160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6161 PCIEMOPBINSIZES pImpl;
6162 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6163 {
6164 case 0: case 1: case 2: case 3:
6165 return IEMOP_RAISE_INVALID_OPCODE();
6166 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6167 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6168 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6169 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6171 }
6172 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6173
6174 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6175 {
6176 /* register destination. */
6177 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6179
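     /* For register destinations the immediate bit offset wraps modulo the
        operand width, hence the 0x0f/0x1f/0x3f masks below. */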
6180 switch (pVCpu->iem.s.enmEffOpSize)
6181 {
6182 case IEMMODE_16BIT:
6183 IEM_MC_BEGIN(3, 0);
6184 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6185 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6187
6188 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6189 IEM_MC_REF_EFLAGS(pEFlags);
6190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6191
6192 IEM_MC_ADVANCE_RIP();
6193 IEM_MC_END();
6194 return VINF_SUCCESS;
6195
6196 case IEMMODE_32BIT:
6197 IEM_MC_BEGIN(3, 0);
6198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6199 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6200 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6201
6202 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6203 IEM_MC_REF_EFLAGS(pEFlags);
6204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6205
6206 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6207 IEM_MC_ADVANCE_RIP();
6208 IEM_MC_END();
6209 return VINF_SUCCESS;
6210
6211 case IEMMODE_64BIT:
6212 IEM_MC_BEGIN(3, 0);
6213 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6214 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6216
6217 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6218 IEM_MC_REF_EFLAGS(pEFlags);
6219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6220
6221 IEM_MC_ADVANCE_RIP();
6222 IEM_MC_END();
6223 return VINF_SUCCESS;
6224
6225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6226 }
6227 }
6228 else
6229 {
6230 /* memory destination. */
6231
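     /* BT only reads its destination and has no locked variant, so a NULL
        pfnLockedU16 doubles as the read-only test here. */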
6232 uint32_t fAccess;
6233 if (pImpl->pfnLockedU16)
6234 fAccess = IEM_ACCESS_DATA_RW;
6235 else /* BT */
6236 fAccess = IEM_ACCESS_DATA_R;
6237
6238 /** @todo test negative bit offsets! */
6239 switch (pVCpu->iem.s.enmEffOpSize)
6240 {
6241 case IEMMODE_16BIT:
6242 IEM_MC_BEGIN(3, 1);
6243 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6244 IEM_MC_ARG(uint16_t, u16Src, 1);
6245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6247
6248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6249 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6250 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6251 if (pImpl->pfnLockedU16)
6252 IEMOP_HLP_DONE_DECODING();
6253 else
6254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6255 IEM_MC_FETCH_EFLAGS(EFlags);
6256 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6257 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6259 else
6260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6262
6263 IEM_MC_COMMIT_EFLAGS(EFlags);
6264 IEM_MC_ADVANCE_RIP();
6265 IEM_MC_END();
6266 return VINF_SUCCESS;
6267
6268 case IEMMODE_32BIT:
6269 IEM_MC_BEGIN(3, 1);
6270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6271 IEM_MC_ARG(uint32_t, u32Src, 1);
6272 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6274
6275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6276 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6277 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6278 if (pImpl->pfnLockedU16)
6279 IEMOP_HLP_DONE_DECODING();
6280 else
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282 IEM_MC_FETCH_EFLAGS(EFlags);
6283 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6284 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6285 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6286 else
6287 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6289
6290 IEM_MC_COMMIT_EFLAGS(EFlags);
6291 IEM_MC_ADVANCE_RIP();
6292 IEM_MC_END();
6293 return VINF_SUCCESS;
6294
6295 case IEMMODE_64BIT:
6296 IEM_MC_BEGIN(3, 1);
6297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6298 IEM_MC_ARG(uint64_t, u64Src, 1);
6299 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6301
6302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6303 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6304 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6305 if (pImpl->pfnLockedU16)
6306 IEMOP_HLP_DONE_DECODING();
6307 else
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 IEM_MC_FETCH_EFLAGS(EFlags);
6310 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6311 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6313 else
6314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6316
6317 IEM_MC_COMMIT_EFLAGS(EFlags);
6318 IEM_MC_ADVANCE_RIP();
6319 IEM_MC_END();
6320 return VINF_SUCCESS;
6321
6322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6323 }
6324 }
6325
6326}
6327
6328
6329/** Opcode 0x0f 0xbb. */
6330FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6331{
6332 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6333 IEMOP_HLP_MIN_386();
6334 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6335}
6336
6337
6338/** Opcode 0x0f 0xbc. */
6339FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6340{
6341 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6342 IEMOP_HLP_MIN_386();
6343 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6344 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6345}
6346
6347
6348/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6349FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6350
6351
6352/** Opcode 0x0f 0xbd. */
6353FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6354{
6355 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6356 IEMOP_HLP_MIN_386();
6357 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6358 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6359}
6360
6361
6362/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6363FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6364
6365
6366/** Opcode 0x0f 0xbe. */
6367FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6368{
6369 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6370 IEMOP_HLP_MIN_386();
6371
6372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6373
6374 /*
6375 * If rm is denoting a register, no more instruction bytes.
6376 */
6377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6378 {
6379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6380 switch (pVCpu->iem.s.enmEffOpSize)
6381 {
6382 case IEMMODE_16BIT:
6383 IEM_MC_BEGIN(0, 1);
6384 IEM_MC_LOCAL(uint16_t, u16Value);
6385 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6386 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6387 IEM_MC_ADVANCE_RIP();
6388 IEM_MC_END();
6389 return VINF_SUCCESS;
6390
6391 case IEMMODE_32BIT:
6392 IEM_MC_BEGIN(0, 1);
6393 IEM_MC_LOCAL(uint32_t, u32Value);
6394 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6395 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6396 IEM_MC_ADVANCE_RIP();
6397 IEM_MC_END();
6398 return VINF_SUCCESS;
6399
6400 case IEMMODE_64BIT:
6401 IEM_MC_BEGIN(0, 1);
6402 IEM_MC_LOCAL(uint64_t, u64Value);
6403 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6404 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6405 IEM_MC_ADVANCE_RIP();
6406 IEM_MC_END();
6407 return VINF_SUCCESS;
6408
6409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6410 }
6411 }
6412 else
6413 {
6414 /*
6415 * We're loading a register from memory.
6416 */
6417 switch (pVCpu->iem.s.enmEffOpSize)
6418 {
6419 case IEMMODE_16BIT:
6420 IEM_MC_BEGIN(0, 2);
6421 IEM_MC_LOCAL(uint16_t, u16Value);
6422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6426 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6427 IEM_MC_ADVANCE_RIP();
6428 IEM_MC_END();
6429 return VINF_SUCCESS;
6430
6431 case IEMMODE_32BIT:
6432 IEM_MC_BEGIN(0, 2);
6433 IEM_MC_LOCAL(uint32_t, u32Value);
6434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6437 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6438 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 case IEMMODE_64BIT:
6444 IEM_MC_BEGIN(0, 2);
6445 IEM_MC_LOCAL(uint64_t, u64Value);
6446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6450 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6451 IEM_MC_ADVANCE_RIP();
6452 IEM_MC_END();
6453 return VINF_SUCCESS;
6454
6455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6456 }
6457 }
6458}
6459
6460
6461/** Opcode 0x0f 0xbf. */
6462FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6463{
6464 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6465 IEMOP_HLP_MIN_386();
6466
6467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6468
6469 /** @todo Not entirely sure how the operand size prefix is handled here,
6470 * assuming that it will be ignored. Would be nice to have a few
6471 * tests for this. */
6472 /*
6473 * If rm is denoting a register, no more instruction bytes.
6474 */
6475 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6476 {
6477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6478 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6479 {
6480 IEM_MC_BEGIN(0, 1);
6481 IEM_MC_LOCAL(uint32_t, u32Value);
6482 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6483 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6484 IEM_MC_ADVANCE_RIP();
6485 IEM_MC_END();
6486 }
6487 else
6488 {
6489 IEM_MC_BEGIN(0, 1);
6490 IEM_MC_LOCAL(uint64_t, u64Value);
6491 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6492 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 }
6497 else
6498 {
6499 /*
6500 * We're loading a register from memory.
6501 */
6502 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6503 {
6504 IEM_MC_BEGIN(0, 2);
6505 IEM_MC_LOCAL(uint32_t, u32Value);
6506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6509 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6510 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6511 IEM_MC_ADVANCE_RIP();
6512 IEM_MC_END();
6513 }
6514 else
6515 {
6516 IEM_MC_BEGIN(0, 2);
6517 IEM_MC_LOCAL(uint64_t, u64Value);
6518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6522 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6523 IEM_MC_ADVANCE_RIP();
6524 IEM_MC_END();
6525 }
6526 }
6527 return VINF_SUCCESS;
6528}
6529
6530
6531/** Opcode 0x0f 0xc0. */
6532FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6533{
6534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6535 IEMOP_HLP_MIN_486();
6536 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6537
6538 /*
6539 * If rm is denoting a register, no more instruction bytes.
6540 */
6541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6542 {
6543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6544
6545 IEM_MC_BEGIN(3, 0);
6546 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6547 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6548 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6549
6550 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6551 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6552 IEM_MC_REF_EFLAGS(pEFlags);
6553 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6554
6555 IEM_MC_ADVANCE_RIP();
6556 IEM_MC_END();
6557 }
6558 else
6559 {
6560 /*
6561 * We're accessing memory.
6562 */
6563 IEM_MC_BEGIN(3, 3);
6564 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6565 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6566 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6567 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6569
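     /* The register operand is exchanged via a local copy, which is written
        back to the register once the memory operand has been committed. */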
6570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
     IEMOP_HLP_DONE_DECODING(); /* LOCK is legal for XADD; finish decoding as the cmpxchg paths above do. */
6571 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6572 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6573 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6574 IEM_MC_FETCH_EFLAGS(EFlags);
6575 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6576 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6577 else
6578 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6579
6580 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6581 IEM_MC_COMMIT_EFLAGS(EFlags);
6582 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6583 IEM_MC_ADVANCE_RIP();
6584 IEM_MC_END();
6585 return VINF_SUCCESS;
6586 }
6587 return VINF_SUCCESS;
6588}
6589
6590
6591/** Opcode 0x0f 0xc1. */
6592FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6593{
6594 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6595 IEMOP_HLP_MIN_486();
6596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6597
6598 /*
6599 * If rm is denoting a register, no more instruction bytes.
6600 */
6601 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6602 {
6603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6604
6605 switch (pVCpu->iem.s.enmEffOpSize)
6606 {
6607 case IEMMODE_16BIT:
6608 IEM_MC_BEGIN(3, 0);
6609 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6610 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6611 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6612
6613 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6614 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6615 IEM_MC_REF_EFLAGS(pEFlags);
6616 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6617
6618 IEM_MC_ADVANCE_RIP();
6619 IEM_MC_END();
6620 return VINF_SUCCESS;
6621
6622 case IEMMODE_32BIT:
6623 IEM_MC_BEGIN(3, 0);
6624 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6625 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6626 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6627
6628 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6629 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6630 IEM_MC_REF_EFLAGS(pEFlags);
6631 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6632
6633 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6634 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 return VINF_SUCCESS;
6638
6639 case IEMMODE_64BIT:
6640 IEM_MC_BEGIN(3, 0);
6641 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6642 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6643 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6644
6645 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6646 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6647 IEM_MC_REF_EFLAGS(pEFlags);
6648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6649
6650 IEM_MC_ADVANCE_RIP();
6651 IEM_MC_END();
6652 return VINF_SUCCESS;
6653
6654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6655 }
6656 }
6657 else
6658 {
6659 /*
6660 * We're accessing memory.
6661 */
6662 switch (pVCpu->iem.s.enmEffOpSize)
6663 {
6664 case IEMMODE_16BIT:
6665 IEM_MC_BEGIN(3, 3);
6666 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6667 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6668 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6669 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6671
6672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
     IEMOP_HLP_DONE_DECODING(); /* LOCK is legal for XADD; finish decoding as the cmpxchg paths above do. */
6673 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6674 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6675 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6676 IEM_MC_FETCH_EFLAGS(EFlags);
6677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6678 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6679 else
6680 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6681
6682 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6683 IEM_MC_COMMIT_EFLAGS(EFlags);
6684 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6685 IEM_MC_ADVANCE_RIP();
6686 IEM_MC_END();
6687 return VINF_SUCCESS;
6688
6689 case IEMMODE_32BIT:
6690 IEM_MC_BEGIN(3, 3);
6691 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6692 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6693 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6694 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6696
6697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
     IEMOP_HLP_DONE_DECODING(); /* LOCK is legal for XADD; finish decoding as the cmpxchg paths above do. */
6698 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6699 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6700 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6701 IEM_MC_FETCH_EFLAGS(EFlags);
6702 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6703 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6704 else
6705 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6706
6707 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6708 IEM_MC_COMMIT_EFLAGS(EFlags);
6709 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6710 IEM_MC_ADVANCE_RIP();
6711 IEM_MC_END();
6712 return VINF_SUCCESS;
6713
6714 case IEMMODE_64BIT:
6715 IEM_MC_BEGIN(3, 3);
6716 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6717 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6718 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6719 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6721
6722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
     IEMOP_HLP_DONE_DECODING(); /* LOCK is legal for XADD; finish decoding as the cmpxchg paths above do. */
6723 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6724 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6725 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6726 IEM_MC_FETCH_EFLAGS(EFlags);
6727 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6728 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6729 else
6730 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6731
6732 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6733 IEM_MC_COMMIT_EFLAGS(EFlags);
6734 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6735 IEM_MC_ADVANCE_RIP();
6736 IEM_MC_END();
6737 return VINF_SUCCESS;
6738
6739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6740 }
6741 }
6742}
6743
6744
6745/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6746FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6747/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6748FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6749/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6750FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6751/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6752FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6753
6754
6755/** Opcode 0x0f 0xc3. */
6756FNIEMOP_DEF(iemOp_movnti_My_Gy)
6757{
6758 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6759
6760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6761
6762 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
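     /* The non-temporal hint has no bearing on emulation; a plain store is done. */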
6763 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6764 {
6765 switch (pVCpu->iem.s.enmEffOpSize)
6766 {
6767 case IEMMODE_32BIT:
6768 IEM_MC_BEGIN(0, 2);
6769 IEM_MC_LOCAL(uint32_t, u32Value);
6770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6771
6772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6774 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6775 return IEMOP_RAISE_INVALID_OPCODE();
6776
6777 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6778 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6779 IEM_MC_ADVANCE_RIP();
6780 IEM_MC_END();
6781 break;
6782
6783 case IEMMODE_64BIT:
6784 IEM_MC_BEGIN(0, 2);
6785 IEM_MC_LOCAL(uint64_t, u64Value);
6786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6787
6788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6790 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6791 return IEMOP_RAISE_INVALID_OPCODE();
6792
6793 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6794 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6795 IEM_MC_ADVANCE_RIP();
6796 IEM_MC_END();
6797 break;
6798
6799 case IEMMODE_16BIT:
6800 /** @todo check this form. */
6801 return IEMOP_RAISE_INVALID_OPCODE();
6802 }
6803 }
6804 else
6805 return IEMOP_RAISE_INVALID_OPCODE();
6806 return VINF_SUCCESS;
6807}
6808/* Opcode 0x66 0x0f 0xc3 - invalid */
6809/* Opcode 0xf3 0x0f 0xc3 - invalid */
6810/* Opcode 0xf2 0x0f 0xc3 - invalid */
6811
6812/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6813FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6814/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6815FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6816/* Opcode 0xf3 0x0f 0xc4 - invalid */
6817/* Opcode 0xf2 0x0f 0xc4 - invalid */
6818
6819/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6820FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6821/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6822FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6823/* Opcode 0xf3 0x0f 0xc5 - invalid */
6824/* Opcode 0xf2 0x0f 0xc5 - invalid */
6825
6826/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6827FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6828/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6829FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6830/* Opcode 0xf3 0x0f 0xc6 - invalid */
6831/* Opcode 0xf2 0x0f 0xc6 - invalid */
6832
6833
6834/** Opcode 0x0f 0xc7 !11/1. */
6835FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6836{
6837 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6838
6839 IEM_MC_BEGIN(4, 3);
6840 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6841 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6842 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6843 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6844 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6845 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6847
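     /* EDX:EAX and ECX:EBX are gathered into 64-bit locals so the helper can
        compare and exchange whole quadwords. */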
6848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6849 IEMOP_HLP_DONE_DECODING();
6850 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6851
6852 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6853 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6854 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6855
6856 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6857 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6858 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6859
6860 IEM_MC_FETCH_EFLAGS(EFlags);
6861 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6862 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6863 else
6864 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6865
6866 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6867 IEM_MC_COMMIT_EFLAGS(EFlags);
6868 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6869 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6870 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6871 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6872 IEM_MC_ENDIF();
6873 IEM_MC_ADVANCE_RIP();
6874
6875 IEM_MC_END();
6876 return VINF_SUCCESS;
6877}
6878
6879
6880/** Opcode REX.W 0x0f 0xc7 !11/1. */
6881FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6882{
6883 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6884 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6885 {
6886#if 0
6887 RT_NOREF(bRm);
6888 IEMOP_BITCH_ABOUT_STUB();
6889 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6890#else
6891 IEM_MC_BEGIN(4, 3);
6892 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6893 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6894 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6895 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6896 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6897 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6899
6900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6901 IEMOP_HLP_DONE_DECODING();
6902 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6903 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6904
6905 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6906 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6907 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6908
6909 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6910 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6911 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6912
6913 IEM_MC_FETCH_EFLAGS(EFlags);
6914# ifdef RT_ARCH_AMD64
6915 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6916 {
6917 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6918 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6919 else
6920 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6921 }
6922 else
6923# endif
6924 {
6925 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
6926 accesses and not at all atomic, which works fine in a UNI CPU guest
6927 configuration (ignoring DMA). If guest SMP is active we have no choice
6928 but to use a rendezvous callback here. Sigh. */
6929 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6930 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6931 else
6932 {
6933 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6934 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6935 }
6936 }
6937
6938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6939 IEM_MC_COMMIT_EFLAGS(EFlags);
6940 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6941 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6942 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6943 IEM_MC_ENDIF();
6944 IEM_MC_ADVANCE_RIP();
6945
6946 IEM_MC_END();
6947 return VINF_SUCCESS;
6948#endif
6949 }
6950 Log(("cmpxchg16b -> #UD\n"));
6951 return IEMOP_RAISE_INVALID_OPCODE();
6952}
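
/*
 * Hedged sketch of a host side 16-byte compare-exchange using the GCC/Clang
 * __int128 atomic builtin (assumes a toolchain and host CPU with cmpxchg16b
 * support, e.g. -mcx16; illustrative only, not the actual iemAImpl_cmpxchg16b
 * assembly helper):
 */
#if 0
# include <stdbool.h>
static bool CmpXchg16bHostSketch(volatile unsigned __int128 *pu128Mem,
                                 unsigned __int128 *pu128RaxRdx,
                                 unsigned __int128 u128RbxRcx)
{
    /* On failure the builtin copies the current memory value into *pu128RaxRdx,
       mirroring how CMPXCHG16B loads RDX:RAX; the return value maps to ZF. */
    return __atomic_compare_exchange_n(pu128Mem, pu128RaxRdx, u128RbxRcx,
                                       false /*fWeak*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif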
6953
6954
6955/** Opcode 0x0f 0xc7 11/6. */
6956FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6957
6958/** Opcode 0x0f 0xc7 !11/6. */
6959FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6960
6961/** Opcode 0x66 0x0f 0xc7 !11/6. */
6962FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6963
6964/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6965FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6966
6967/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6968FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6969
6970
6971/** Opcode 0x0f 0xc7. */
6972FNIEMOP_DEF(iemOp_Grp9)
6973{
6974 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6976 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6977 {
6978 case 0: case 2: case 3: case 4: case 5:
6979 return IEMOP_RAISE_INVALID_OPCODE();
6980 case 1:
6981 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6982 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6983 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6984 return IEMOP_RAISE_INVALID_OPCODE();
6985 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6986 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6987 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6988 case 6:
6989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6990 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6991 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6992 {
6993 case 0:
6994 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6995 case IEM_OP_PRF_SIZE_OP:
6996 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6997 case IEM_OP_PRF_REPZ:
6998 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6999 default:
7000 return IEMOP_RAISE_INVALID_OPCODE();
7001 }
7002 case 7:
7003 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7004 {
7005 case 0:
7006 case IEM_OP_PRF_REPZ:
7007 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7008 default:
7009 return IEMOP_RAISE_INVALID_OPCODE();
7010 }
7011 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7012 }
7013}
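
/*
 * Illustrative sketch of the ModR/M decomposition the group dispatcher above
 * relies on (a plain C restatement of the X86_MODRM_* masks and shifts; the
 * struct and function names are made up for this sketch):
 */
#if 0
# include <stdint.h>
typedef struct MODRMSKETCH { uint8_t bMod, bReg, bRm; } MODRMSKETCH;
static MODRMSKETCH DecodeModRmSketch(uint8_t bRm)
{
    MODRMSKETCH Fields;
    Fields.bMod = bRm >> 6;         /* 3 = register operand, 0..2 = memory forms. */
    Fields.bReg = (bRm >> 3) & 7;   /* The /digit opcode extension for group opcodes. */
    Fields.bRm  = bRm & 7;          /* Register number or addressing mode selector. */
    return Fields;
}
#endif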
7014
7015
7016/**
7017 * Common 'bswap register' helper.
7018 */
7019FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7020{
7021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7022 switch (pVCpu->iem.s.enmEffOpSize)
7023 {
7024 case IEMMODE_16BIT:
7025 IEM_MC_BEGIN(1, 0);
7026 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7027 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7028 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7029 IEM_MC_ADVANCE_RIP();
7030 IEM_MC_END();
7031 return VINF_SUCCESS;
7032
7033 case IEMMODE_32BIT:
7034 IEM_MC_BEGIN(1, 0);
7035 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7036 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7037 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7038 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7039 IEM_MC_ADVANCE_RIP();
7040 IEM_MC_END();
7041 return VINF_SUCCESS;
7042
7043 case IEMMODE_64BIT:
7044 IEM_MC_BEGIN(1, 0);
7045 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7046 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7047 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7048 IEM_MC_ADVANCE_RIP();
7049 IEM_MC_END();
7050 return VINF_SUCCESS;
7051
7052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7053 }
7054}
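
/*
 * Plain C sketch of the byte swaps done by the assembly helpers invoked above
 * (illustrative; note that the 16-bit BSWAP form is architecturally undefined,
 * which is why the 16-bit case above hands a 32-bit register reference to a
 * dedicated helper instead):
 */
#if 0
# include <stdint.h>
static uint32_t BswapU32Sketch(uint32_t u)
{
    return (u >> 24)
         | ((u >>  8) & UINT32_C(0x0000ff00))
         | ((u <<  8) & UINT32_C(0x00ff0000))
         |  (u << 24);
}
static uint64_t BswapU64Sketch(uint64_t u)
{
    return ((uint64_t)BswapU32Sketch((uint32_t)u) << 32) | BswapU32Sketch((uint32_t)(u >> 32));
}
#endif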
7055
7056
7057/** Opcode 0x0f 0xc8. */
7058FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7059{
7060 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7061 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7062 prefix. In practice, REX.B appears to be the correct prefix. For a
7063 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7064 IEMOP_HLP_MIN_486();
7065 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7066}
7067
7068
7069/** Opcode 0x0f 0xc9. */
7070FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7071{
7072 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7073 IEMOP_HLP_MIN_486();
7074 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7075}
7076
7077
7078/** Opcode 0x0f 0xca. */
7079FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7080{
7081 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7082 IEMOP_HLP_MIN_486();
7083 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7084}
7085
7086
7087/** Opcode 0x0f 0xcb. */
7088FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7089{
7090 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7091 IEMOP_HLP_MIN_486();
7092 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7093}
7094
7095
7096/** Opcode 0x0f 0xcc. */
7097FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7098{
7099 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7100 IEMOP_HLP_MIN_486();
7101 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7102}
7103
7104
7105/** Opcode 0x0f 0xcd. */
7106FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7107{
7108 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7109 IEMOP_HLP_MIN_486();
7110 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7111}
7112
7113
7114/** Opcode 0x0f 0xce. */
7115FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7116{
7117 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7118 IEMOP_HLP_MIN_486();
7119 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7120}
7121
7122
7123/** Opcode 0x0f 0xcf. */
7124FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7125{
7126 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7127 IEMOP_HLP_MIN_486();
7128 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7129}
7130
7131
7132/* Opcode 0x0f 0xd0 - invalid */
7133/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7134FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7135/* Opcode 0xf3 0x0f 0xd0 - invalid */
7136/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7137FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7138
7139/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7140FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7141/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7142FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7143/* Opcode 0xf3 0x0f 0xd1 - invalid */
7144/* Opcode 0xf2 0x0f 0xd1 - invalid */
7145
7146/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7147FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7148/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7149FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7150/* Opcode 0xf3 0x0f 0xd2 - invalid */
7151/* Opcode 0xf2 0x0f 0xd2 - invalid */
7152
7153/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7154FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7155/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7156FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7157/* Opcode 0xf3 0x0f 0xd3 - invalid */
7158/* Opcode 0xf2 0x0f 0xd3 - invalid */
7159
7160/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7161FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7162/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7163FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7164/* Opcode 0xf3 0x0f 0xd4 - invalid */
7165/* Opcode 0xf2 0x0f 0xd4 - invalid */
7166
7167/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7168FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7169/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7170FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7171/* Opcode 0xf3 0x0f 0xd5 - invalid */
7172/* Opcode 0xf2 0x0f 0xd5 - invalid */
7173
7174/* Opcode 0x0f 0xd6 - invalid */
7175/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7176FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7177/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7178FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7179/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7180FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7181#if 0
7182FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7183{
7184 /* Docs say register only. */
7185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7186
7187 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7188 {
7189 case IEM_OP_PRF_SIZE_OP: /* SSE */
7190 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7191 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7192 IEM_MC_BEGIN(2, 0);
7193 IEM_MC_ARG(uint64_t *, pDst, 0);
7194 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7196 IEM_MC_PREPARE_SSE_USAGE();
7197 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7198 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7199 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7200 IEM_MC_ADVANCE_RIP();
7201 IEM_MC_END();
7202 return VINF_SUCCESS;
7203
7204 case 0: /* MMX */
7205 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7206 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7207 IEM_MC_BEGIN(2, 0);
7208 IEM_MC_ARG(uint64_t *, pDst, 0);
7209 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7210 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7211 IEM_MC_PREPARE_FPU_USAGE();
7212 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7213 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7214 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7215 IEM_MC_ADVANCE_RIP();
7216 IEM_MC_END();
7217 return VINF_SUCCESS;
7218
7219 default:
7220 return IEMOP_RAISE_INVALID_OPCODE();
7221 }
7222}
7223#endif
7224
7225
7226/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7227FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7228{
7229 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7230 /** @todo testcase: Check that the instruction implicitly clears the high
7231 * bits in 64-bit mode. The REX.W prefix only becomes necessary once
7232 * VLMAX > 256 and opcode modifications are made to work with the whole
7233 * width (not just 128). */
7234 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7235 /* Docs say register only. */
7236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7238 {
7239 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7240 IEM_MC_BEGIN(2, 0);
7241 IEM_MC_ARG(uint64_t *, pDst, 0);
7242 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7243 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7244 IEM_MC_PREPARE_FPU_USAGE();
7245 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7246 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7247 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7248 IEM_MC_ADVANCE_RIP();
7249 IEM_MC_END();
7250 return VINF_SUCCESS;
7251 }
7252 return IEMOP_RAISE_INVALID_OPCODE();
7253}
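
/*
 * Plain C sketch of what iemAImpl_pmovmskb_u64 computes (illustrative only):
 * bit i of the result is the most significant bit of source byte i, and all
 * higher result bits are zero.
 */
#if 0
# include <stdint.h>
static uint32_t PMovMskBU64Sketch(uint64_t uSrc)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= (uint32_t)((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif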
7254
7255 /** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7256FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7257{
7258 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7259 /** @todo testcase: Check that the instruction implicitly clears the high
7260 * bits in 64-bit mode. The REX.W prefix only becomes necessary once
7261 * VLMAX > 256 and opcode modifications are made to work with the whole
7262 * width (not just 128). */
7263 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux");
7264 /* Docs say register only. */
7265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7267 {
7268 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7269 IEM_MC_BEGIN(2, 0);
7270 IEM_MC_ARG(uint64_t *, pDst, 0);
7271 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7272 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7273 IEM_MC_PREPARE_SSE_USAGE();
7274 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7275 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7276 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7277 IEM_MC_ADVANCE_RIP();
7278 IEM_MC_END();
7279 return VINF_SUCCESS;
7280 }
7281 return IEMOP_RAISE_INVALID_OPCODE();
7282}
7283
7284/* Opcode 0xf3 0x0f 0xd7 - invalid */
7285/* Opcode 0xf2 0x0f 0xd7 - invalid */
7286
7287
7288/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7289FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7290/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7291FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7292/* Opcode 0xf3 0x0f 0xd8 - invalid */
7293/* Opcode 0xf2 0x0f 0xd8 - invalid */
7294
7295/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7296FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7297/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7298FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7299/* Opcode 0xf3 0x0f 0xd9 - invalid */
7300/* Opcode 0xf2 0x0f 0xd9 - invalid */
7301
7302/** Opcode 0x0f 0xda - pminub Pq, Qq */
7303FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7304/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7305FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7306/* Opcode 0xf3 0x0f 0xda - invalid */
7307/* Opcode 0xf2 0x0f 0xda - invalid */
7308
7309/** Opcode 0x0f 0xdb - pand Pq, Qq */
7310FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7311/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7312FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7313/* Opcode 0xf3 0x0f 0xdb - invalid */
7314/* Opcode 0xf2 0x0f 0xdb - invalid */
7315
7316/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7317FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7318/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7319FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7320/* Opcode 0xf3 0x0f 0xdc - invalid */
7321/* Opcode 0xf2 0x0f 0xdc - invalid */
7322
7323/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7324FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7325/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7326FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7327/* Opcode 0xf3 0x0f 0xdd - invalid */
7328/* Opcode 0xf2 0x0f 0xdd - invalid */
7329
7330/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7331FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7332/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7333FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7334/* Opcode 0xf3 0x0f 0xde - invalid */
7335/* Opcode 0xf2 0x0f 0xde - invalid */
7336
7337/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7338FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7339/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7340FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7341/* Opcode 0xf3 0x0f 0xdf - invalid */
7342/* Opcode 0xf2 0x0f 0xdf - invalid */
7343
7344/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7345FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7346/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7347FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7348/* Opcode 0xf3 0x0f 0xe0 - invalid */
7349/* Opcode 0xf2 0x0f 0xe0 - invalid */
7350
7351/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7352FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7353/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7354FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7355/* Opcode 0xf3 0x0f 0xe1 - invalid */
7356/* Opcode 0xf2 0x0f 0xe1 - invalid */
7357
7358/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7359FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7360/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7361FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7362/* Opcode 0xf3 0x0f 0xe2 - invalid */
7363/* Opcode 0xf2 0x0f 0xe2 - invalid */
7364
7365/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7366FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7367/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7368FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7369/* Opcode 0xf3 0x0f 0xe3 - invalid */
7370/* Opcode 0xf2 0x0f 0xe3 - invalid */
7371
7372/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7373FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7374/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7375FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7376/* Opcode 0xf3 0x0f 0xe4 - invalid */
7377/* Opcode 0xf2 0x0f 0xe4 - invalid */
7378
7379/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7380FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7381/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7382FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7383/* Opcode 0xf3 0x0f 0xe5 - invalid */
7384/* Opcode 0xf2 0x0f 0xe5 - invalid */
7385
7386/* Opcode 0x0f 0xe6 - invalid */
7387/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7388FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7389/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7390FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7391/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7392FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7393
7394
7395/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7396FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7397{
7398 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7400 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7401 {
7402 /* Register, memory. */
7403 IEM_MC_BEGIN(0, 2);
7404 IEM_MC_LOCAL(uint64_t, uSrc);
7405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7406
7407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7409 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7410 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7411
7412 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7413 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7414
7415 IEM_MC_ADVANCE_RIP();
7416 IEM_MC_END();
7417 return VINF_SUCCESS;
7418 }
7419 /* The register, register encoding is invalid. */
7420 return IEMOP_RAISE_INVALID_OPCODE();
7421}
7422
7423/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7424FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7425{
7426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7427 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7428 {
7429 /* Register, memory. */
7430 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7431 IEM_MC_BEGIN(0, 2);
7432 IEM_MC_LOCAL(uint128_t, uSrc);
7433 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7434
7435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7437 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7439
7440 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7441 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7442
7443 IEM_MC_ADVANCE_RIP();
7444 IEM_MC_END();
7445 return VINF_SUCCESS;
7446 }
7447
7448 /* The register, register encoding is invalid. */
7449 return IEMOP_RAISE_INVALID_OPCODE();
7450}
7451
7452/* Opcode 0xf3 0x0f 0xe7 - invalid */
7453/* Opcode 0xf2 0x0f 0xe7 - invalid */
7454
7455
7456/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7457FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7458/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7459FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7460/* Opcode 0xf3 0x0f 0xe8 - invalid */
7461/* Opcode 0xf2 0x0f 0xe8 - invalid */
7462
7463/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7464FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7465/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7466FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7467/* Opcode 0xf3 0x0f 0xe9 - invalid */
7468/* Opcode 0xf2 0x0f 0xe9 - invalid */
7469
7470/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7471FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7472/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7473FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7474/* Opcode 0xf3 0x0f 0xea - invalid */
7475/* Opcode 0xf2 0x0f 0xea - invalid */
7476
7477/** Opcode 0x0f 0xeb - por Pq, Qq */
7478FNIEMOP_STUB(iemOp_por_Pq_Qq);
7479/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7480FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7481/* Opcode 0xf3 0x0f 0xeb - invalid */
7482/* Opcode 0xf2 0x0f 0xeb - invalid */
7483
7484/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7485FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7486/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7487FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7488/* Opcode 0xf3 0x0f 0xec - invalid */
7489/* Opcode 0xf2 0x0f 0xec - invalid */
7490
7491/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7492FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7493/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7494FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7495/* Opcode 0xf3 0x0f 0xed - invalid */
7496/* Opcode 0xf2 0x0f 0xed - invalid */
7497
7498/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7499FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7500/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7501FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7502/* Opcode 0xf3 0x0f 0xee - invalid */
7503/* Opcode 0xf2 0x0f 0xee - invalid */
7504
7505
7506/** Opcode 0x0f 0xef - pxor Pq, Qq */
7507FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7508{
7509 IEMOP_MNEMONIC(pxor, "pxor");
7510 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7511}
7512
7513/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7514FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7515{
7516 IEMOP_MNEMONIC(vpxor, "vpxor");
7517 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7518}
7519
7520/* Opcode 0xf3 0x0f 0xef - invalid */
7521/* Opcode 0xf2 0x0f 0xef - invalid */
7522
7523/* Opcode 0x0f 0xf0 - invalid */
7524/* Opcode 0x66 0x0f 0xf0 - invalid */
7525/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7526FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7527
7528/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7529FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7530/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7531FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7532/* Opcode 0xf2 0x0f 0xf1 - invalid */
7533
7534/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7535FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7536/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7537FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7538/* Opcode 0xf2 0x0f 0xf2 - invalid */
7539
7540/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7541FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7542/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7543FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7544/* Opcode 0xf2 0x0f 0xf3 - invalid */
7545
7546/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7547FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7548/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7549FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7550/* Opcode 0xf2 0x0f 0xf4 - invalid */
7551
7552/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7553FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7554/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7555FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7556/* Opcode 0xf2 0x0f 0xf5 - invalid */
7557
7558/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7559FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7560/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7561FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7562/* Opcode 0xf2 0x0f 0xf6 - invalid */
7563
7564/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7565FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7566/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7567FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7568/* Opcode 0xf2 0x0f 0xf7 - invalid */
7569
7570/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7571FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7572/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7573FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7574/* Opcode 0xf2 0x0f 0xf8 - invalid */
7575
7576/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7577FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7578/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7579FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7580/* Opcode 0xf2 0x0f 0xf9 - invalid */
7581
7582/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7583FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7584/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7585FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7586/* Opcode 0xf2 0x0f 0xfa - invalid */
7587
7588/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7589FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7590/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7591FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7592/* Opcode 0xf2 0x0f 0xfb - invalid */
7593
7594/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7595FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7596/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7597FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7598/* Opcode 0xf2 0x0f 0xfc - invalid */
7599
7600/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7601FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7602/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7603FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7604/* Opcode 0xf2 0x0f 0xfd - invalid */
7605
7606/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7607FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7608/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7609FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7610/* Opcode 0xf2 0x0f 0xfe - invalid */
7611
7612
7613/** Opcode **** 0x0f 0xff - UD0 */
7614FNIEMOP_DEF(iemOp_ud0)
7615{
7616 IEMOP_MNEMONIC(ud0, "ud0");
7617 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7618 {
7619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7620#ifndef TST_IEM_CHECK_MC
7621 RTGCPTR GCPtrEff;
7622 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7623 if (rcStrict != VINF_SUCCESS)
7624 return rcStrict;
7625#endif
7626 IEMOP_HLP_DONE_DECODING();
7627 }
7628 return IEMOP_RAISE_INVALID_OPCODE();
7629}
7630
7631
7632
7633/**
7634 * Two byte opcode map, first byte 0x0f.
7635 *
7636 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7637 * check if it needs updating as well when making changes.
7638 */
7639IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7640{
7641 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7642 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7643 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7644 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7645 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7646 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7647 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7648 /* 0x06 */ IEMOP_X4(iemOp_clts),
7649 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7650 /* 0x08 */ IEMOP_X4(iemOp_invd),
7651 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7652 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7653 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7654 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7655 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7656 /* 0x0e */ IEMOP_X4(iemOp_femms),
7657 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7658
7659 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7660 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7661 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7662 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7663 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7664 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7665 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7666 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7667 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7668 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7669 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7670 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7671 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7672 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7673 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7674 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7675
7676 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7677 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7678 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7679 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7680 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7681 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7682 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7683 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7684 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7685 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7686 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7687 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7688 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7689 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7690 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7691 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7692
7693 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7694 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7695 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7696 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7697 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7698 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7699 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7700 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7701 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7702 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7703 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7704 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7705 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7706 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7707 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7708 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7709
7710 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7711 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7712 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7713 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7714 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7715 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7716 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7717 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7718 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7719 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7720 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7721 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7722 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7723 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7724 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7725 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7726
7727 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7728 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7729 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7730 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7731 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7732 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7736 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7737 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7738 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7739 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7740 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7741 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7742 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7743
7744 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7745 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7746 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7747 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7748 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7749 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7750 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7751 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7752 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7753 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7755 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7756 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7758 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7759 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7760
7761 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7762 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7763 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7764 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7765 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7766 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7767 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7768 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7769
7770 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7771 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7772 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7773 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7774 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7775 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7776 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7777 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7778
7779 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7780 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7781 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7782 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7783 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7784 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7785 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7786 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7787 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7788 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7789 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7790 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7791 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7792 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7793 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7794 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7795
7796 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7797 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7798 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7799 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7800 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7801 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7802 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7803 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7804 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7805 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7806 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7807 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7808 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7809 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7810 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7811 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7812
7813 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7814 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7815 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7816 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7817 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7818 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7819 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7820 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7821 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7822 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7823 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7824 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7825 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7826 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7827 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7828 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7829
7830 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7831 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7832 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7833 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7834 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7835 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7836 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7837 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7838 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7839 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7840 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7841 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7842 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7843 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7844 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7845 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7846
7847 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7848 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7849 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7850 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7851 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7852 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7853 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7854 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7855 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7856 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7857 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7858 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7859 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7860 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7861 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7862 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7863
7864 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7865 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7866 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7867 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7868 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7869 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7871 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7876 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7878 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880
7881 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7884 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7886 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7887 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7888 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7895 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7897
7898 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7899 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7900 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7901 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7902 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7903 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7904 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7905 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7906 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7907 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7908 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7909 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7910 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7911 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7912 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7913 /* 0xff */ IEMOP_X4(iemOp_ud0),
7914};
7915AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
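
/*
 * Hedged sketch of how a prefix indexed table like the one above is meant to
 * be indexed: four consecutive entries per opcode byte, selected by the
 * mandatory prefix column (none/0x66/0xf3/0xf2).  This is a simplification
 * for illustration, not the actual decoder dispatch code:
 */
#if 0
static PFNIEMOP PickTwoByteHandlerSketch(uint8_t bOpcode,
                                         unsigned idxPrefix /* 0=none, 1=0x66, 2=0xf3, 3=0xf2 */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif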
7916
7917
7918/**
7919 * VEX opcode map \#1.
7920 *
7921 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7922 * it needs updating too when making changes.
7923 */
7924IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7925{
7926 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7927 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7928 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7929 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7930 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7931 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7932 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7933 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7934 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7935 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7936 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7937 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7938 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7939 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7940 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7943
7944 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7945 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7946 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7947 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7948 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7949 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7950 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7951 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7952 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7953 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7954 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7955 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7956 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7957 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7958 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7959 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7960
7961 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7962 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7963 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7964 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7965 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7966 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7970 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7971 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7972 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7973 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7974 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7975 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7976 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7977
7978 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7979 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7980 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7981 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7982 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7983 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7987 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7988 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7989 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7990 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7991 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7992 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7993 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7994
7995 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7996 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7997 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7998 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7999 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8000 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8001 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8002 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8003 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8004 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8005 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8006 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8007 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8008 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8009 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8010 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8011
8012 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8013 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8014 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8015 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8016 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8021 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8022 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8023 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8024 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8025 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8026 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8027 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8028
8029 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8032 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8033 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8035 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8036 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8037 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8038 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8039 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8040 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8041 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8042 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8043 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8044 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8045
8046 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8047 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8048 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8049 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8050 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8051 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8052 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8053 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8054 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8055 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8056 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8057 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8058 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8059 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8060 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8061 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8062
/* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
/* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
/* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

/* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
/* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
/* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
/* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
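
/*
 * Editor's illustrative sketch, not part of the IEM sources: how a
 * prefix-indexed dispatch table like g_apfnTwoByteMap above is consulted.
 * Every opcode byte owns four consecutive slots, one per SIMD prefix
 * (none, 0x66, 0xF3, 0xF2), which is why the AssertCompile above checks
 * for 256 * 4 = 1024 entries; the IEMOP_X4() rows simply fill one handler
 * into all four columns.  All names below (PFNDEMOOP, g_apfnDemoMap,
 * demoDispatchTwoByte, ...) are hypothetical stand-ins, not IEM symbols.
 */
#include <stdint.h>
#include <stdio.h>

typedef void (*PFNDEMOOP)(void);

static void demoOpInvalid(void)  { printf("invalid encoding\n"); }
static void demoOpNoPrefix(void) { printf("no-prefix form\n"); }
static void demoOp66(void)       { printf("0x66 form\n"); }

/* Four slots per opcode: [no prefix, 0x66, 0xF3, 0xF2]. */
static const PFNDEMOOP g_apfnDemoMap[2 * 4] =
{
    /* 0x00 */ demoOpNoPrefix, demoOp66,      demoOpInvalid, demoOpInvalid,
    /* 0x01 */ demoOpInvalid,  demoOpInvalid, demoOpInvalid, demoOpInvalid,
};

/* idxPrefix: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
static void demoDispatchTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    g_apfnDemoMap[(unsigned)bOpcode * 4 + idxPrefix]();
}

int main(void)
{
    demoDispatchTwoByte(0x00, 1); /* selects the 0x66 column: prints "0x66 form" */
    return 0;
}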