VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 65775

Last change on this file was revision 65775, checked in by vboxsync, 8 years ago:

IEM: Use prefix indexed tables for group 14.

1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 65775 2017-02-13 16:01:39Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
94
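/**
 * Quick orientation for the recurring ModR/M tests in this file (an
 * illustrative sketch, not code the decoder runs): mod lives in bits 7:6,
 * reg in bits 5:3 and r/m in bits 2:0, which is what the X86_MODRM_*
 * constants encode.
 *
 * @code
 *  uint8_t const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  // 3 means register operand
 *  uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // opcode extension or Greg
 *  uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;                            // register / addressing form
 * @endcode
 *
 * In 64-bit mode the pre-shifted REX bits (pVCpu->iem.s.uRexReg / uRexB)
 * are OR'ed in to extend iReg and iRm to 0..15.
 */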
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
289
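/**
 * Worked example (encoding assumed for illustration): the byte sequence
 * 0f 00 d8 gives bRm=0xd8, i.e. mod=3, reg=3, r/m=0, so g_apfnGroup6[3]
 * dispatches to iemOp_Grp6_ltr and the instruction is 'ltr ax'.
 */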
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441/** Opcode 0x0f 0x01 0xd8. */
442FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
443
444/** Opcode 0x0f 0x01 0xd9. */
445FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
446
447/** Opcode 0x0f 0x01 0xda. */
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
449
450/** Opcode 0x0f 0x01 0xdb. */
451FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
452
453/** Opcode 0x0f 0x01 0xdc. */
454FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
455
456/** Opcode 0x0f 0x01 0xdd. */
457FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
458
459/** Opcode 0x0f 0x01 0xde. */
460FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
461
462/** Opcode 0x0f 0x01 0xdf. */
463FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
464
465/** Opcode 0x0f 0x01 /4. */
466FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
467{
468 IEMOP_MNEMONIC(smsw, "smsw");
469 IEMOP_HLP_MIN_286();
470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
471 {
472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
473 switch (pVCpu->iem.s.enmEffOpSize)
474 {
475 case IEMMODE_16BIT:
476 IEM_MC_BEGIN(0, 1);
477 IEM_MC_LOCAL(uint16_t, u16Tmp);
478 IEM_MC_FETCH_CR0_U16(u16Tmp);
479 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
480 { /* likely */ }
481 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
482 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
483 else
484 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
485 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489
490 case IEMMODE_32BIT:
491 IEM_MC_BEGIN(0, 1);
492 IEM_MC_LOCAL(uint32_t, u32Tmp);
493 IEM_MC_FETCH_CR0_U32(u32Tmp);
494 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
495 IEM_MC_ADVANCE_RIP();
496 IEM_MC_END();
497 return VINF_SUCCESS;
498
499 case IEMMODE_64BIT:
500 IEM_MC_BEGIN(0, 1);
501 IEM_MC_LOCAL(uint64_t, u64Tmp);
502 IEM_MC_FETCH_CR0_U64(u64Tmp);
503 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
504 IEM_MC_ADVANCE_RIP();
505 IEM_MC_END();
506 return VINF_SUCCESS;
507
508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
509 }
510 }
511 else
512 {
513 /* Ignore operand size here, memory refs are always 16-bit. */
514 IEM_MC_BEGIN(0, 2);
515 IEM_MC_LOCAL(uint16_t, u16Tmp);
516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
519 IEM_MC_FETCH_CR0_U16(u16Tmp);
520 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
521 { /* likely */ }
522 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
523 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
524 else
525 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
526 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
527 IEM_MC_ADVANCE_RIP();
528 IEM_MC_END();
529 return VINF_SUCCESS;
530 }
531}
532
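/**
 * Background on the 0xffe0/0xfff0 fills in both smsw paths: the code models
 * a 286 where the undefined MSW bits 4..15 read as ones, and a 386 where
 * bit 4 (ET) is real so only bits 5..15 are forced to one; anything newer
 * returns the live CR0 bits unmodified.
 */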
533
534/** Opcode 0x0f 0x01 /6. */
535FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
536{
537 /* The operand size is effectively ignored; everything is 16-bit and only
538 the lower four bits (PE, MP, EM and TS) are used. */
539 IEMOP_MNEMONIC(lmsw, "lmsw");
540 IEMOP_HLP_MIN_286();
541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
542 {
543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
544 IEM_MC_BEGIN(1, 0);
545 IEM_MC_ARG(uint16_t, u16Tmp, 0);
546 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
547 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
548 IEM_MC_END();
549 }
550 else
551 {
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(uint16_t, u16Tmp, 0);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
557 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
558 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
559 IEM_MC_END();
560 }
561 return VINF_SUCCESS;
562}
563
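/**
 * What iemCImpl_lmsw boils down to, as a sketch with illustrative variable
 * names, assuming the SDM semantics (the actual worker also checks CPL and
 * raises #GP(0) as needed):
 *
 * @code
 *  uint64_t uNewCr0  = uOldCr0 & ~(uint64_t)(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS);
 *  uNewCr0          |= u16Tmp  &  (X86_CR0_MP | X86_CR0_EM | X86_CR0_TS);
 *  uNewCr0          |= u16Tmp  &  X86_CR0_PE; // lmsw can set PE but never clear it
 * @endcode
 */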
564
565/** Opcode 0x0f 0x01 /7. */
566FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
567{
568 IEMOP_MNEMONIC(invlpg, "invlpg");
569 IEMOP_HLP_MIN_486();
570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
571 IEM_MC_BEGIN(1, 1);
572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
575 IEM_MC_END();
576 return VINF_SUCCESS;
577}
578
579
580/** Opcode 0x0f 0x01 /7. */
581FNIEMOP_DEF(iemOp_Grp7_swapgs)
582{
583 IEMOP_MNEMONIC(swapgs, "swapgs");
584 IEMOP_HLP_ONLY_64BIT();
585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
586 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
587}
588
589
590/** Opcode 0x0f 0x01 /7. */
591FNIEMOP_DEF(iemOp_Grp7_rdtscp)
592{
593 NOREF(pVCpu);
594 IEMOP_BITCH_ABOUT_STUB();
595 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
596}
597
598
599/**
600 * Group 7 jump table, memory variant.
601 */
602IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
603{
604 iemOp_Grp7_sgdt,
605 iemOp_Grp7_sidt,
606 iemOp_Grp7_lgdt,
607 iemOp_Grp7_lidt,
608 iemOp_Grp7_smsw,
609 iemOp_InvalidWithRM,
610 iemOp_Grp7_lmsw,
611 iemOp_Grp7_invlpg
612};
613
614
615/** Opcode 0x0f 0x01. */
616FNIEMOP_DEF(iemOp_Grp7)
617{
618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
619 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
620 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
621
622 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
623 {
624 case 0:
625 switch (bRm & X86_MODRM_RM_MASK)
626 {
627 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
628 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
629 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
630 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
631 }
632 return IEMOP_RAISE_INVALID_OPCODE();
633
634 case 1:
635 switch (bRm & X86_MODRM_RM_MASK)
636 {
637 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
638 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
639 }
640 return IEMOP_RAISE_INVALID_OPCODE();
641
642 case 2:
643 switch (bRm & X86_MODRM_RM_MASK)
644 {
645 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
646 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
647 }
648 return IEMOP_RAISE_INVALID_OPCODE();
649
650 case 3:
651 switch (bRm & X86_MODRM_RM_MASK)
652 {
653 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
654 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
655 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
656 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
657 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
658 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
659 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
660 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
662 }
663
664 case 4:
665 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
666
667 case 5:
668 return IEMOP_RAISE_INVALID_OPCODE();
669
670 case 6:
671 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
672
673 case 7:
674 switch (bRm & X86_MODRM_RM_MASK)
675 {
676 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
677 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
678 }
679 return IEMOP_RAISE_INVALID_OPCODE();
680
681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
682 }
683}
684
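/**
 * Illustration of the two-level dispatch above (encodings assumed):
 * 0f 01 d0 has mod=3, reg=2, r/m=0 and takes the register path to xgetbv,
 * while 0f 01 10 has mod=0, reg=2 and goes through g_apfnGroup7Mem[2] to
 * lgdt with a memory operand.
 */
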
685/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
686FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
687{
688 IEMOP_HLP_NO_REAL_OR_V86_MODE();
689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
690
691 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
692 {
693 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
694 switch (pVCpu->iem.s.enmEffOpSize)
695 {
696 case IEMMODE_16BIT:
697 {
698 IEM_MC_BEGIN(3, 0);
699 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
700 IEM_MC_ARG(uint16_t, u16Sel, 1);
701 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
702
703 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
704 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
705 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
706
707 IEM_MC_END();
708 return VINF_SUCCESS;
709 }
710
711 case IEMMODE_32BIT:
712 case IEMMODE_64BIT:
713 {
714 IEM_MC_BEGIN(3, 0);
715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
716 IEM_MC_ARG(uint16_t, u16Sel, 1);
717 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
718
719 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
720 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
721 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
722
723 IEM_MC_END();
724 return VINF_SUCCESS;
725 }
726
727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
728 }
729 }
730 else
731 {
732 switch (pVCpu->iem.s.enmEffOpSize)
733 {
734 case IEMMODE_16BIT:
735 {
736 IEM_MC_BEGIN(3, 1);
737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
738 IEM_MC_ARG(uint16_t, u16Sel, 1);
739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741
742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
743 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
744
745 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
746 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
747 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
748
749 IEM_MC_END();
750 return VINF_SUCCESS;
751 }
752
753 case IEMMODE_32BIT:
754 case IEMMODE_64BIT:
755 {
756 IEM_MC_BEGIN(3, 1);
757 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
758 IEM_MC_ARG(uint16_t, u16Sel, 1);
759 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
761
762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
763 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
764/** @todo testcase: make sure it's a 16-bit read. */
765
766 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
767 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
768 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
769
770 IEM_MC_END();
771 return VINF_SUCCESS;
772 }
773
774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
775 }
776 }
777}
778
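/**
 * Note on iemOpCommonLarLsl_Gv_Ew above: the 32-bit and 64-bit operand sizes
 * share one template and both hand the worker a 64-bit register reference;
 * presumably iemCImpl_LarLsl_u64 produces a zero-extended 32-bit result when
 * the effective operand size is 32-bit, which is what saves a third copy of
 * the template.
 */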
779
780
781/** Opcode 0x0f 0x02. */
782FNIEMOP_DEF(iemOp_lar_Gv_Ew)
783{
784 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
785 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
786}
787
788
789/** Opcode 0x0f 0x03. */
790FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
791{
792 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
793 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
794}
795
796
797/** Opcode 0x0f 0x05. */
798FNIEMOP_DEF(iemOp_syscall)
799{
800 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
802 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
803}
804
805
806/** Opcode 0x0f 0x06. */
807FNIEMOP_DEF(iemOp_clts)
808{
809 IEMOP_MNEMONIC(clts, "clts");
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
812}
813
814
815/** Opcode 0x0f 0x07. */
816FNIEMOP_DEF(iemOp_sysret)
817{
818 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
820 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
821}
822
823
824/** Opcode 0x0f 0x08. */
825FNIEMOP_STUB(iemOp_invd);
826// IEMOP_HLP_MIN_486();
827
828
829/** Opcode 0x0f 0x09. */
830FNIEMOP_DEF(iemOp_wbinvd)
831{
832 IEMOP_MNEMONIC(wbinvd, "wbinvd");
833 IEMOP_HLP_MIN_486();
834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
835 IEM_MC_BEGIN(0, 0);
836 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 return VINF_SUCCESS; /* ignore for now */
840}
841
842
843/** Opcode 0x0f 0x0b. */
844FNIEMOP_DEF(iemOp_ud2)
845{
846 IEMOP_MNEMONIC(ud2, "ud2");
847 return IEMOP_RAISE_INVALID_OPCODE();
848}
849
850/** Opcode 0x0f 0x0d. */
851FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
852{
853 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
854 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
855 {
856 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
857 return IEMOP_RAISE_INVALID_OPCODE();
858 }
859
860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
862 {
863 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
864 return IEMOP_RAISE_INVALID_OPCODE();
865 }
866
867 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
868 {
869 case 2: /* Aliased to /0 for the time being. */
870 case 4: /* Aliased to /0 for the time being. */
871 case 5: /* Aliased to /0 for the time being. */
872 case 6: /* Aliased to /0 for the time being. */
873 case 7: /* Aliased to /0 for the time being. */
874 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
875 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
876 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
878 }
879
880 IEM_MC_BEGIN(0, 1);
881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 /* Currently a NOP. */
885 NOREF(GCPtrEffSrc);
886 IEM_MC_ADVANCE_RIP();
887 IEM_MC_END();
888 return VINF_SUCCESS;
889}
890
891
892/** Opcode 0x0f 0x0e. */
893FNIEMOP_STUB(iemOp_femms);
894
895
896/** Opcode 0x0f 0x0f 0x0c. */
897FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
898
899/** Opcode 0x0f 0x0f 0x0d. */
900FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
901
902/** Opcode 0x0f 0x0f 0x1c. */
903FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
904
905/** Opcode 0x0f 0x0f 0x1d. */
906FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
907
908/** Opcode 0x0f 0x0f 0x8a. */
909FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
910
911/** Opcode 0x0f 0x0f 0x8e. */
912FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
913
914/** Opcode 0x0f 0x0f 0x90. */
915FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
916
917/** Opcode 0x0f 0x0f 0x94. */
918FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
919
920/** Opcode 0x0f 0x0f 0x96. */
921FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
922
923/** Opcode 0x0f 0x0f 0x97. */
924FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
925
926/** Opcode 0x0f 0x0f 0x9a. */
927FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
928
929/** Opcode 0x0f 0x0f 0x9e. */
930FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
931
932/** Opcode 0x0f 0x0f 0xa0. */
933FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
934
935/** Opcode 0x0f 0x0f 0xa4. */
936FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
937
938/** Opcode 0x0f 0x0f 0xa6. */
939FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
940
941/** Opcode 0x0f 0x0f 0xa7. */
942FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
943
944/** Opcode 0x0f 0x0f 0xaa. */
945FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
946
947/** Opcode 0x0f 0x0f 0xae. */
948FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
949
950/** Opcode 0x0f 0x0f 0xb0. */
951FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
952
953/** Opcode 0x0f 0x0f 0xb4. */
954FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
955
956/** Opcode 0x0f 0x0f 0xb6. */
957FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
958
959/** Opcode 0x0f 0x0f 0xb7. */
960FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
961
962/** Opcode 0x0f 0x0f 0xbb. */
963FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
964
965/** Opcode 0x0f 0x0f 0xbf. */
966FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
967
968
969/** Opcode 0x0f 0x0f. */
970FNIEMOP_DEF(iemOp_3Dnow)
971{
972 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
973 {
974 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
975 return IEMOP_RAISE_INVALID_OPCODE();
976 }
977
978 /* This is pretty sparse, use switch instead of table. */
979 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
980 switch (b)
981 {
982 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
983 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
984 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
985 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
986 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
987 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
988 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
989 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
990 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
991 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
992 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
993 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
994 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
995 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
996 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
997 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
998 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
999 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1000 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1001 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1002 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1003 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1004 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1005 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1006 default:
1007 return IEMOP_RAISE_INVALID_OPCODE();
1008 }
1009}
1010
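/**
 * 3DNow! encoding note: these instructions are 0f 0f /r with the operation
 * selected by a trailing imm8, i.e. the selector byte comes after the ModR/M
 * byte and any displacement.  The dispatcher above reads the next opcode byte
 * straight away, which is harmless while every worker is a stub; a full
 * implementation would have to decode the operands first and fetch the
 * selector last.
 */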
1011
1012/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1013FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1014/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1015FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1016/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1017FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1018/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1019FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1020
1021
1022/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1023FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1024{
1025 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1028 {
1029 /*
1030 * Register, register.
1031 */
1032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1033 IEM_MC_BEGIN(0, 0);
1034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1035 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1036 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1037 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1038 IEM_MC_ADVANCE_RIP();
1039 IEM_MC_END();
1040 }
1041 else
1042 {
1043 /*
1044 * Memory, register.
1045 */
1046 IEM_MC_BEGIN(0, 2);
1047 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1049
1050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1052 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1053 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1054
1055 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1056 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1057
1058 IEM_MC_ADVANCE_RIP();
1059 IEM_MC_END();
1060 }
1061 return VINF_SUCCESS;
1062}
1063
1064
1065/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1066FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1067
1068/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1069FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1070
1071/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1072FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1073{
1074 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1077 {
1078 /*
1079 * Register, register.
1080 */
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_BEGIN(0, 1);
1083 IEM_MC_LOCAL(uint64_t, uSrc);
1084
1085 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1086 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1087 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1088 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1089
1090 IEM_MC_ADVANCE_RIP();
1091 IEM_MC_END();
1092 }
1093 else
1094 {
1095 /*
1096 * Memory, register.
1097 */
1098 IEM_MC_BEGIN(0, 2);
1099 IEM_MC_LOCAL(uint64_t, uSrc);
1100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1101
1102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1106
1107 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1108 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1109
1110 IEM_MC_ADVANCE_RIP();
1111 IEM_MC_END();
1112 }
1113 return VINF_SUCCESS;
1114}
1115
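/**
 * Register-form note for movsd above: only the low quadword is written
 * (IEM_MC_STORE_XREG_U64), so bits 127:64 of the destination XMM register
 * are left untouched, matching the documented xmm,xmm behaviour.
 */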
1116
1117/** Opcode 0x0f 0x12. */
1118FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1119
1120/** Opcode 0x66 0x0f 0x12. */
1121FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1122
1123/** Opcode 0xf3 0x0f 0x12. */
1124FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1125
1126/** Opcode 0xf2 0x0f 0x12. */
1127FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1128
1129/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1130FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1131
1132/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1133FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1134{
1135 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1138 {
1139#if 0
1140 /*
1141 * Register, register.
1142 */
1143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1144 IEM_MC_BEGIN(0, 1);
1145 IEM_MC_LOCAL(uint64_t, uSrc);
1146 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1148 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1149 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1150 IEM_MC_ADVANCE_RIP();
1151 IEM_MC_END();
1152#else
1153 return IEMOP_RAISE_INVALID_OPCODE();
1154#endif
1155 }
1156 else
1157 {
1158 /*
1159 * Memory, register.
1160 */
1161 IEM_MC_BEGIN(0, 2);
1162 IEM_MC_LOCAL(uint64_t, uSrc);
1163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1164
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1169
1170 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1172
1173 IEM_MC_ADVANCE_RIP();
1174 IEM_MC_END();
1175 }
1176 return VINF_SUCCESS;
1177}
1178
1179/* Opcode 0xf3 0x0f 0x13 - invalid */
1180/* Opcode 0xf2 0x0f 0x13 - invalid */
1181
1182/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1183FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1184/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1185FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1186/* Opcode 0xf3 0x0f 0x14 - invalid */
1187/* Opcode 0xf2 0x0f 0x14 - invalid */
1188/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1189FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1190/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1191FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1192/* Opcode 0xf3 0x0f 0x15 - invalid */
1193/* Opcode 0xf2 0x0f 0x15 - invalid */
1194/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1195FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1196/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1197FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1198/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1199FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1200/* Opcode 0xf2 0x0f 0x16 - invalid */
1201/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1202FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1203/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1204FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1205/* Opcode 0xf3 0x0f 0x17 - invalid */
1206/* Opcode 0xf2 0x0f 0x17 - invalid */
1207
1208
1209/** Opcode 0x0f 0x18. */
1210FNIEMOP_DEF(iemOp_prefetch_Grp16)
1211{
1212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1213 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1214 {
1215 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1216 {
1217 case 4: /* Aliased to /0 for the time being according to AMD. */
1218 case 5: /* Aliased to /0 for the time being according to AMD. */
1219 case 6: /* Aliased to /0 for the time being according to AMD. */
1220 case 7: /* Aliased to /0 for the time being according to AMD. */
1221 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1222 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1223 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1224 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1226 }
1227
1228 IEM_MC_BEGIN(0, 1);
1229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1232 /* Currently a NOP. */
1233 NOREF(GCPtrEffSrc);
1234 IEM_MC_ADVANCE_RIP();
1235 IEM_MC_END();
1236 return VINF_SUCCESS;
1237 }
1238
1239 return IEMOP_RAISE_INVALID_OPCODE();
1240}
1241
1242
1243/** Opcode 0x0f 0x19..0x1f. */
1244FNIEMOP_DEF(iemOp_nop_Ev)
1245{
1246 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1248 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1249 {
1250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1251 IEM_MC_BEGIN(0, 0);
1252 IEM_MC_ADVANCE_RIP();
1253 IEM_MC_END();
1254 }
1255 else
1256 {
1257 IEM_MC_BEGIN(0, 1);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1261 /* Currently a NOP. */
1262 NOREF(GCPtrEffSrc);
1263 IEM_MC_ADVANCE_RIP();
1264 IEM_MC_END();
1265 }
1266 return VINF_SUCCESS;
1267}
1268
1269
1270/** Opcode 0x0f 0x20. */
1271FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1272{
1273 /* The mod field is ignored, as are operand size overrides. */
1274 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1275 IEMOP_HLP_MIN_386();
1276 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1277 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1278 else
1279 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1280
1281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1282 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1284 {
1285 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1286 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1287 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1288 iCrReg |= 8;
1289 }
1290 switch (iCrReg)
1291 {
1292 case 0: case 2: case 3: case 4: case 8:
1293 break;
1294 default:
1295 return IEMOP_RAISE_INVALID_OPCODE();
1296 }
1297 IEMOP_HLP_DONE_DECODING();
1298
1299 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1300}
1301
1302
1303/** Opcode 0x0f 0x21. */
1304FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1305{
1306 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1307 IEMOP_HLP_MIN_386();
1308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1310 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1311 return IEMOP_RAISE_INVALID_OPCODE();
1312 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1313 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1314 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1315}
1316
1317
1318/** Opcode 0x0f 0x22. */
1319FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1320{
1321 /* The mod field is ignored, as are operand size overrides. */
1322 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1323 IEMOP_HLP_MIN_386();
1324 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1325 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1326 else
1327 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1328
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1332 {
1333 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1334 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1335 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1336 iCrReg |= 8;
1337 }
1338 switch (iCrReg)
1339 {
1340 case 0: case 2: case 3: case 4: case 8:
1341 break;
1342 default:
1343 return IEMOP_RAISE_INVALID_OPCODE();
1344 }
1345 IEMOP_HLP_DONE_DECODING();
1346
1347 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1348}
1349
1350
1351/** Opcode 0x0f 0x23. */
1352FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1353{
1354 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1355 IEMOP_HLP_MIN_386();
1356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1359 return IEMOP_RAISE_INVALID_OPCODE();
1360 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1361 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1362 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1363}
1364
1365
1366/** Opcode 0x0f 0x24. */
1367FNIEMOP_DEF(iemOp_mov_Rd_Td)
1368{
1369 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1370 /** @todo works on 386 and 486. */
1371 /* The RM byte is not considered, see testcase. */
1372 return IEMOP_RAISE_INVALID_OPCODE();
1373}
1374
1375
1376/** Opcode 0x0f 0x26. */
1377FNIEMOP_DEF(iemOp_mov_Td_Rd)
1378{
1379 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1380 /** @todo works on 386 and 486. */
1381 /* The RM byte is not considered, see testcase. */
1382 return IEMOP_RAISE_INVALID_OPCODE();
1383}
1384
1385
1386/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1387FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1388{
1389 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1392 {
1393 /*
1394 * Register, register.
1395 */
1396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1397 IEM_MC_BEGIN(0, 0);
1398 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1400 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1401 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1402 IEM_MC_ADVANCE_RIP();
1403 IEM_MC_END();
1404 }
1405 else
1406 {
1407 /*
1408 * Register, memory.
1409 */
1410 IEM_MC_BEGIN(0, 2);
1411 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1413
1414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1416 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1417 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1418
1419 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1420 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1421
1422 IEM_MC_ADVANCE_RIP();
1423 IEM_MC_END();
1424 }
1425 return VINF_SUCCESS;
1426}
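/**
 * Unlike the unaligned movups/movupd forms at 0x10/0x11, the aligned moves
 * here use the IEM_MC_FETCH/STORE_MEM_U128_ALIGN_SSE variants, which raise
 * #GP(0) when the effective address is not 16-byte aligned, per the
 * architectural movaps/movapd rules.
 */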
1427
1428/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1429FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1430{
1431 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1434 {
1435 /*
1436 * Register, register.
1437 */
1438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1439 IEM_MC_BEGIN(0, 0);
1440 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1442 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1443 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1444 IEM_MC_ADVANCE_RIP();
1445 IEM_MC_END();
1446 }
1447 else
1448 {
1449 /*
1450 * Register, memory.
1451 */
1452 IEM_MC_BEGIN(0, 2);
1453 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1455
1456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1458 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1459 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1460
1461 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1462 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1463
1464 IEM_MC_ADVANCE_RIP();
1465 IEM_MC_END();
1466 }
1467 return VINF_SUCCESS;
1468}
1469
1470/* Opcode 0xf3 0x0f 0x28 - invalid */
1471/* Opcode 0xf2 0x0f 0x28 - invalid */
1472
1473/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1474FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1475{
1476 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1479 {
1480 /*
1481 * Register, register.
1482 */
1483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1484 IEM_MC_BEGIN(0, 0);
1485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1487 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1488 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1489 IEM_MC_ADVANCE_RIP();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /*
1495 * Memory, register.
1496 */
1497 IEM_MC_BEGIN(0, 2);
1498 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1500
1501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1504 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1505
1506 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1507 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1508
1509 IEM_MC_ADVANCE_RIP();
1510 IEM_MC_END();
1511 }
1512 return VINF_SUCCESS;
1513}
1514
1515/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1516FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1517{
1518 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1521 {
1522 /*
1523 * Register, register.
1524 */
1525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1526 IEM_MC_BEGIN(0, 0);
1527 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1529 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1530 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1531 IEM_MC_ADVANCE_RIP();
1532 IEM_MC_END();
1533 }
1534 else
1535 {
1536 /*
1537 * Memory, register.
1538 */
1539 IEM_MC_BEGIN(0, 2);
1540 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1542
1543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1545 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1547
1548 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1550
1551 IEM_MC_ADVANCE_RIP();
1552 IEM_MC_END();
1553 }
1554 return VINF_SUCCESS;
1555}
1556
1557/* Opcode 0xf3 0x0f 0x29 - invalid */
1558/* Opcode 0xf2 0x0f 0x29 - invalid */
1559
1560
1561/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1562FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1563/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1564FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1565/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1566FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1567/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1568FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1569
1570
1571/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1572FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1573{
1574 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1576 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1577 {
1578 /*
1579 * Memory, register.
1580 */
1581 IEM_MC_BEGIN(0, 2);
1582 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1584
1585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1587 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1588 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1589
1590 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1591 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1592
1593 IEM_MC_ADVANCE_RIP();
1594 IEM_MC_END();
1595 }
1596 /* The register, register encoding is invalid. */
1597 else
1598 return IEMOP_RAISE_INVALID_OPCODE();
1599 return VINF_SUCCESS;
1600}
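/**
 * The non-temporal hint has no architectural side effect, so emulating
 * movntps as a plain aligned 128-bit store (as above) should be sufficient;
 * only the cache-allocation hint is lost.
 */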
1601
1602/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1603FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1604{
1605 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
1606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1608 {
1609 /*
1610 * Memory, register.
1611 */
1612 IEM_MC_BEGIN(0, 2);
1613 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1615
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1618 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1619 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1620
1621 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1622 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1623
1624 IEM_MC_ADVANCE_RIP();
1625 IEM_MC_END();
1626 }
1627 /* The register, register encoding is invalid. */
1628 else
1629 return IEMOP_RAISE_INVALID_OPCODE();
1630 return VINF_SUCCESS;
1631}
1632/* Opcode 0xf3 0x0f 0x2b - invalid */
1633/* Opcode 0xf2 0x0f 0x2b - invalid */
1634
1635
1636/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1637FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1638/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1639FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1640/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1641FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1642/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1643FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1644
1645/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1646FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1647/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1648FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1649/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1650FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1651/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1652FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1653
1654/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1655FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1656/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1657FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1658/* Opcode 0xf3 0x0f 0x2e - invalid */
1659/* Opcode 0xf2 0x0f 0x2e - invalid */
1660
1661/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1662FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1663/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1664FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1665/* Opcode 0xf3 0x0f 0x2f - invalid */
1666/* Opcode 0xf2 0x0f 0x2f - invalid */
1667
1668/** Opcode 0x0f 0x30. */
1669FNIEMOP_DEF(iemOp_wrmsr)
1670{
1671 IEMOP_MNEMONIC(wrmsr, "wrmsr");
1672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1673 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
1674}
1675
1676
1677/** Opcode 0x0f 0x31. */
1678FNIEMOP_DEF(iemOp_rdtsc)
1679{
1680 IEMOP_MNEMONIC(rdtsc, "rdtsc");
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
1683}
1684
1685
1686/** Opcode 0x0f 0x32. */
1687FNIEMOP_DEF(iemOp_rdmsr)
1688{
1689 IEMOP_MNEMONIC(rdmsr, "rdmsr");
1690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
1692}
1693
1694
1695/** Opcode 0x0f 0x33. */
1696FNIEMOP_STUB(iemOp_rdpmc);
1697/** Opcode 0x0f 0x34. */
1698FNIEMOP_STUB(iemOp_sysenter);
1699/** Opcode 0x0f 0x35. */
1700FNIEMOP_STUB(iemOp_sysexit);
1701/** Opcode 0x0f 0x37. */
1702FNIEMOP_STUB(iemOp_getsec);
1703/** Opcode 0x0f 0x38. */
1704FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1705/** Opcode 0x0f 0x3a. */
1706FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1707
1708
1709/**
1710 * Implements a conditional move.
1711 *
1712 * Wish there was an obvious way to do this where we could share and reduce
1713 * code bloat.
1714 *
1715 * @param a_Cnd The conditional "microcode" operation.
1716 */
1717#define CMOV_X(a_Cnd) \
1718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1720 { \
1721 switch (pVCpu->iem.s.enmEffOpSize) \
1722 { \
1723 case IEMMODE_16BIT: \
1724 IEM_MC_BEGIN(0, 1); \
1725 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1726 a_Cnd { \
1727 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1729 } IEM_MC_ENDIF(); \
1730 IEM_MC_ADVANCE_RIP(); \
1731 IEM_MC_END(); \
1732 return VINF_SUCCESS; \
1733 \
1734 case IEMMODE_32BIT: \
1735 IEM_MC_BEGIN(0, 1); \
1736 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1737 a_Cnd { \
1738 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1739 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1740 } IEM_MC_ELSE() { \
1741 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_64BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
1752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1753 } IEM_MC_ENDIF(); \
1754 IEM_MC_ADVANCE_RIP(); \
1755 IEM_MC_END(); \
1756 return VINF_SUCCESS; \
1757 \
1758 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1759 } \
1760 } \
1761 else \
1762 { \
1763 switch (pVCpu->iem.s.enmEffOpSize) \
1764 { \
1765 case IEMMODE_16BIT: \
1766 IEM_MC_BEGIN(0, 2); \
1767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1768 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1771 a_Cnd { \
1772 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
1773 } IEM_MC_ENDIF(); \
1774 IEM_MC_ADVANCE_RIP(); \
1775 IEM_MC_END(); \
1776 return VINF_SUCCESS; \
1777 \
1778 case IEMMODE_32BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
1786 } IEM_MC_ELSE() { \
1787 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
1788 } IEM_MC_ENDIF(); \
1789 IEM_MC_ADVANCE_RIP(); \
1790 IEM_MC_END(); \
1791 return VINF_SUCCESS; \
1792 \
1793 case IEMMODE_64BIT: \
1794 IEM_MC_BEGIN(0, 2); \
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1796 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1798 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1799 a_Cnd { \
1800 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1807 } \
1808 } do {} while (0)
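
/* Usage illustration: CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)) expands into the
   complete fetch/conditional-store sequence for CMOVE (iemOp_cmove_Gv_Ev below).
   Note the asymmetry between the operand sizes: only the 32-bit cases carry an
   IEM_MC_ELSE() clearing the high dword, since a 32-bit CMOVcc zero-extends the
   destination register even when the condition is false, while the 16-bit and
   64-bit forms leave the destination untouched in that case. */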
1809
1810
1811
1812/** Opcode 0x0f 0x40. */
1813FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1814{
1815 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
1816 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1817}
1818
1819
1820/** Opcode 0x0f 0x41. */
1821FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1822{
1823 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
1824 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1825}
1826
1827
1828/** Opcode 0x0f 0x42. */
1829FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
1830{
1831 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
1832 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
1833}
1834
1835
1836/** Opcode 0x0f 0x43. */
1837FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
1838{
1839 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
1840 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
1841}
1842
1843
1844/** Opcode 0x0f 0x44. */
1845FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
1846{
1847 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
1848 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
1849}
1850
1851
1852/** Opcode 0x0f 0x45. */
1853FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
1854{
1855 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
1856 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
1857}
1858
1859
1860/** Opcode 0x0f 0x46. */
1861FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
1862{
1863 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
1864 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1865}
1866
1867
1868/** Opcode 0x0f 0x47. */
1869FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
1870{
1871 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
1872 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
1873}
1874
1875
1876/** Opcode 0x0f 0x48. */
1877FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
1878{
1879 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
1880 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
1881}
1882
1883
1884/** Opcode 0x0f 0x49. */
1885FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
1886{
1887 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
1888 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
1889}
1890
1891
1892/** Opcode 0x0f 0x4a. */
1893FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
1894{
1895 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
1896 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
1897}
1898
1899
1900/** Opcode 0x0f 0x4b. */
1901FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
1902{
1903 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
1904 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
1905}
1906
1907
1908/** Opcode 0x0f 0x4c. */
1909FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
1910{
1911 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
1912 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
1913}
1914
1915
1916/** Opcode 0x0f 0x4d. */
1917FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
1918{
1919 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
1920 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
1921}
1922
1923
1924/** Opcode 0x0f 0x4e. */
1925FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
1926{
1927 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
1928 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1929}
1930
1931
1932/** Opcode 0x0f 0x4f. */
1933FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
1934{
1935 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
1936 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
1937}
1938
1939#undef CMOV_X
1940
1941/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
1942FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1943/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
1944FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1945/* Opcode 0xf3 0x0f 0x50 - invalid */
1946/* Opcode 0xf2 0x0f 0x50 - invalid */
1947
1948/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
1949FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1950/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
1951FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1952/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
1953FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1954/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1955FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1956
1957/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
1958FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1959/* Opcode 0x66 0x0f 0x52 - invalid */
1960/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
1961FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1962/* Opcode 0xf2 0x0f 0x52 - invalid */
1963
1964/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
1965FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1966/* Opcode 0x66 0x0f 0x53 - invalid */
1967/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
1968FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1969/* Opcode 0xf2 0x0f 0x53 - invalid */
1970
1971/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
1972FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1973/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
1974FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1975/* Opcode 0xf3 0x0f 0x54 - invalid */
1976/* Opcode 0xf2 0x0f 0x54 - invalid */
1977
1978/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
1979FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1980/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
1981FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1982/* Opcode 0xf3 0x0f 0x55 - invalid */
1983/* Opcode 0xf2 0x0f 0x55 - invalid */
1984
1985/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
1986FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1987/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
1988FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1989/* Opcode 0xf3 0x0f 0x56 - invalid */
1990/* Opcode 0xf2 0x0f 0x56 - invalid */
1991
1992/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
1993FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1994/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
1995FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1996/* Opcode 0xf3 0x0f 0x57 - invalid */
1997/* Opcode 0xf2 0x0f 0x57 - invalid */
1998
1999/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2000FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2001/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2002FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2003/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2004FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2005/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2006FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2007
2008/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2009FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2010/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2012/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2013FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2014/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2015FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2016
2017/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2018FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2019/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2020FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2021/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2022FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2023/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2024FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2025
2026/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2027FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2028/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2029FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2030/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2031FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2032/* Opcode 0xf2 0x0f 0x5b - invalid */
2033
2034/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2035FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2036/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2037FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2038/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2039FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2040/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2041FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2042
2043/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2044FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2045/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2046FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2047/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2048FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2049/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2050FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2051
2052/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2053FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2054/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2055FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2056/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2057FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2058/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2059FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2060
2061/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2062FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2063/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2064FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2065/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2066FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2067/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2068FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2069
2070/**
2071 * Common worker for SSE2 instructions on the forms:
2072 *      pxxxx xmm1, xmm2/mem128
2073 *
2074 * The 2nd operand is the first half of a register, which in the memory
2075 * case means a 128-bit aligned 64-bit memory access; only the low 64 bits
2076 * of the source are used.
2077 *
2078 * Exceptions type 4.
2079 */
2080FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2081{
2082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2083 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2084 {
2085 /*
2086 * Register, register.
2087 */
2088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2089 IEM_MC_BEGIN(2, 0);
2090 IEM_MC_ARG(uint128_t *, pDst, 0);
2091 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2092 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2093 IEM_MC_PREPARE_SSE_USAGE();
2094 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2095 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2096 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2097 IEM_MC_ADVANCE_RIP();
2098 IEM_MC_END();
2099 }
2100 else
2101 {
2102 /*
2103 * Register, memory.
2104 */
2105 IEM_MC_BEGIN(2, 2);
2106 IEM_MC_ARG(uint128_t *, pDst, 0);
2107 IEM_MC_LOCAL(uint64_t, uSrc);
2108 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2110
2111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2113 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2114 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2115
2116 IEM_MC_PREPARE_SSE_USAGE();
2117 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2118 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2119
2120 IEM_MC_ADVANCE_RIP();
2121 IEM_MC_END();
2122 }
2123 return VINF_SUCCESS;
2124}
2125
2126
2127/**
2128 * Common worker for MMX instructions on the forms:
2129 *      pxxxx mm1, mm2/mem32
2130 *
2131 * The 2nd operand is the first half of a register, which in the memory
2132 * case means a 32-bit memory access (a dword in memory or the low dword
2133 * of an MMX register).
2134 *
2135 * Exceptions type 4.
2136 */
2137FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2138{
2139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2140 if (!pImpl->pfnU64)
2141 return IEMOP_RAISE_INVALID_OPCODE();
2142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2143 {
2144 /*
2145 * Register, register.
2146 */
2147 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2148 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2150 IEM_MC_BEGIN(2, 0);
2151 IEM_MC_ARG(uint64_t *, pDst, 0);
2152 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2153 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2154 IEM_MC_PREPARE_FPU_USAGE();
2155 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2156 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2157 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2158 IEM_MC_ADVANCE_RIP();
2159 IEM_MC_END();
2160 }
2161 else
2162 {
2163 /*
2164 * Register, memory.
2165 */
2166 IEM_MC_BEGIN(2, 2);
2167 IEM_MC_ARG(uint64_t *, pDst, 0);
2168 IEM_MC_LOCAL(uint32_t, uSrc);
2169 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2174 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2175 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2176
2177 IEM_MC_PREPARE_FPU_USAGE();
2178 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2179 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2180
2181 IEM_MC_ADVANCE_RIP();
2182 IEM_MC_END();
2183 }
2184 return VINF_SUCCESS;
2185}
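
/* Worked example for the low-half interleave (punpcklbw): with destination
   bytes d7..d0 and source bytes s7..s0, the result is, low byte first,
   d0,s0,d1,s1,d2,s2,d3,s3 -- the high halves of both operands are dropped. */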
2186
2187
2188/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2189FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2190{
2191 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2193}
2194
2195/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2196FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2197{
2198 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2199    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2200}
2201
2202/* Opcode 0xf3 0x0f 0x60 - invalid */
2203
2204
2205/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2206FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2207{
2208    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
2209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2210}
2211
2212/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2213FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2214{
2215 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2217}
2218
2219/* Opcode 0xf3 0x0f 0x61 - invalid */
2220
2221
2222/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2223FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2224{
2225 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2226 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2227}
2228
2229/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2230FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2231{
2232 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2233 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2234}
2235
2236/* Opcode 0xf3 0x0f 0x62 - invalid */
2237
2238
2239
2240/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2241FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2242/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2243FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2244/* Opcode 0xf3 0x0f 0x63 - invalid */
2245
2246/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2247FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2248/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2249FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2250/* Opcode 0xf3 0x0f 0x64 - invalid */
2251
2252/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2253FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2254/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2255FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2256/* Opcode 0xf3 0x0f 0x65 - invalid */
2257
2258/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2259FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2260/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2261FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2262/* Opcode 0xf3 0x0f 0x66 - invalid */
2263
2264/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2265FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2266/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2267FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2268/* Opcode 0xf3 0x0f 0x67 - invalid */
2269
2270
2271/**
2272 * Common worker for MMX instructions on the form:
2273 * pxxxx mm1, mm2/mem64
2274 *
2275 * The 2nd operand is the second (high) half of a register, which in the
2276 * memory case means a 64-bit memory access; the full 64 bits of the
2277 * source are used.
2278 *
2279 * Exceptions type 4.
2280 */
2281FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2282{
2283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2284 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2291 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2293 IEM_MC_BEGIN(2, 0);
2294 IEM_MC_ARG(uint64_t *, pDst, 0);
2295 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2296 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2297 IEM_MC_PREPARE_FPU_USAGE();
2298 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2299 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2300 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2301 IEM_MC_ADVANCE_RIP();
2302 IEM_MC_END();
2303 }
2304 else
2305 {
2306 /*
2307 * Register, memory.
2308 */
2309 IEM_MC_BEGIN(2, 2);
2310 IEM_MC_ARG(uint64_t *, pDst, 0);
2311 IEM_MC_LOCAL(uint64_t, uSrc);
2312 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314
2315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2318 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2319
2320 IEM_MC_PREPARE_FPU_USAGE();
2321 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2322 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2323
2324 IEM_MC_ADVANCE_RIP();
2325 IEM_MC_END();
2326 }
2327 return VINF_SUCCESS;
2328}
2329
2330
2331/**
2332 * Common worker for SSE2 instructions on the form:
2333 * pxxxx xmm1, xmm2/mem128
2334 *
2335 * The 2nd operand is the second (high) half of a register, which in the
2336 * memory case means a 128-bit aligned access; the implementation may read
2337 * the full 128 bits or only the upper 64 bits.
2338 *
2339 * Exceptions type 4.
2340 */
2341FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2342{
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2345 {
2346 /*
2347 * Register, register.
2348 */
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_BEGIN(2, 0);
2351 IEM_MC_ARG(uint128_t *, pDst, 0);
2352 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2354 IEM_MC_PREPARE_SSE_USAGE();
2355 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2356 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2357 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2358 IEM_MC_ADVANCE_RIP();
2359 IEM_MC_END();
2360 }
2361 else
2362 {
2363 /*
2364 * Register, memory.
2365 */
2366 IEM_MC_BEGIN(2, 2);
2367 IEM_MC_ARG(uint128_t *, pDst, 0);
2368 IEM_MC_LOCAL(uint128_t, uSrc);
2369 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2375        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2376
2377 IEM_MC_PREPARE_SSE_USAGE();
2378 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2379 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2380
2381 IEM_MC_ADVANCE_RIP();
2382 IEM_MC_END();
2383 }
2384 return VINF_SUCCESS;
2385}
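
/* Worked example for the high-half interleave (punpckhbw): with destination
   bytes d7..d0 and source bytes s7..s0, the result is, low byte first,
   d4,s4,d5,s5,d6,s6,d7,s7 -- the mirror image of the punpckl* case above. */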
2386
2387
2388/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2389FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2390{
2391 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2392 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2393}
2394
2395/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2396FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2397{
2398 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2399 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2400}
2401/* Opcode 0xf3 0x0f 0x68 - invalid */
2402
2403
2404/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2405FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2406{
2407 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2409}
2410
2411/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2412FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2413{
2414 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2416
2417}
2418/* Opcode 0xf3 0x0f 0x69 - invalid */
2419
2420
2421/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2422FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2423{
2424 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2425 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2426}
2427
2428/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2429FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2430{
2431 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2432 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2433}
2434/* Opcode 0xf3 0x0f 0x6a - invalid */
2435
2436
2437/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2438FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2439/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2440FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2441/* Opcode 0xf3 0x0f 0x6b - invalid */
2442
2443
2444/* Opcode 0x0f 0x6c - invalid */
2445
2446/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2447FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2448{
2449 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2450 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2451}
2452
2453/* Opcode 0xf3 0x0f 0x6c - invalid */
2454/* Opcode 0xf2 0x0f 0x6c - invalid */
2455
2456
2457/* Opcode 0x0f 0x6d - invalid */
2458
2459/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2460FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2461{
2462    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2463 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2464}
2465
2466/* Opcode 0xf3 0x0f 0x6d - invalid */
2467
2468
2469/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2470FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2474 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2475 else
2476 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* MMX, greg */
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_BEGIN(0, 1);
2482 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2484 IEM_MC_LOCAL(uint64_t, u64Tmp);
2485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2486 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2487 else
2488 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2490 IEM_MC_ADVANCE_RIP();
2491 IEM_MC_END();
2492 }
2493 else
2494 {
2495 /* MMX, [mem] */
2496 IEM_MC_BEGIN(0, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2502 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2503 {
2504 IEM_MC_LOCAL(uint64_t, u64Tmp);
2505 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2507 }
2508 else
2509 {
2510 IEM_MC_LOCAL(uint32_t, u32Tmp);
2511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2512 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2513 }
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
2519
2520/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2521FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2522{
2523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2524 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2525        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2526    else
2527        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2529 {
2530 /* XMM, greg*/
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_BEGIN(0, 1);
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2535 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2536 {
2537 IEM_MC_LOCAL(uint64_t, u64Tmp);
2538 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2539 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2540 }
2541 else
2542 {
2543 IEM_MC_LOCAL(uint32_t, u32Tmp);
2544 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2545 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2546 }
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /* XMM, [mem] */
2553 IEM_MC_BEGIN(0, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2560 {
2561 IEM_MC_LOCAL(uint64_t, u64Tmp);
2562 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564 }
2565 else
2566 {
2567 IEM_MC_LOCAL(uint32_t, u32Tmp);
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2570 }
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
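
/* Note: both operand sizes zero-extend into the full 128-bit XMM register
   (IEM_MC_STORE_XREG_U64_ZX_U128 / IEM_MC_STORE_XREG_U32_ZX_U128 above),
   matching the architectural movd/movq behaviour of clearing the upper bits
   of the destination. */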
2576
2577/* Opcode 0xf3 0x0f 0x6e - invalid */
2578
2579
2580/** Opcode 0x0f 0x6f - movq Pq, Qq */
2581FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2582{
2583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2584 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2586 {
2587 /*
2588 * Register, register.
2589 */
2590 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2591 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2593 IEM_MC_BEGIN(0, 1);
2594 IEM_MC_LOCAL(uint64_t, u64Tmp);
2595 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2596 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2597 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2598 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2599 IEM_MC_ADVANCE_RIP();
2600 IEM_MC_END();
2601 }
2602 else
2603 {
2604 /*
2605 * Register, memory.
2606 */
2607 IEM_MC_BEGIN(0, 2);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2610
2611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2614 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2615 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2617
2618 IEM_MC_ADVANCE_RIP();
2619 IEM_MC_END();
2620 }
2621 return VINF_SUCCESS;
2622}
2623
2624/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2625FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2626{
2627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2628    IEMOP_MNEMONIC(vmovdqa_Vdq_Wdq, "vmovdqa Vx,Wx");
2629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2638 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2639 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2640 IEM_MC_ADVANCE_RIP();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /*
2646 * Register, memory.
2647 */
2648 IEM_MC_BEGIN(0, 2);
2649 IEM_MC_LOCAL(uint128_t, u128Tmp);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2651
2652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2656 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
2665/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
2666FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2667{
2668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2669    IEMOP_MNEMONIC(vmovdqu_Vdq_Wdq, "vmovdqu Vx,Wx");
2670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2671 {
2672 /*
2673 * Register, register.
2674 */
2675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2676 IEM_MC_BEGIN(0, 0);
2677 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2678 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2679 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2680 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 else
2685 {
2686 /*
2687 * Register, memory.
2688 */
2689 IEM_MC_BEGIN(0, 2);
2690 IEM_MC_LOCAL(uint128_t, u128Tmp);
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2697 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2698 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2699
2700 IEM_MC_ADVANCE_RIP();
2701 IEM_MC_END();
2702 }
2703 return VINF_SUCCESS;
2704}
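
/* The only difference from the movdqa path above is the memory fetch:
   IEM_MC_FETCH_MEM_U128 instead of IEM_MC_FETCH_MEM_U128_ALIGN_SSE, i.e. the
   unaligned form performs no 16-byte alignment check -- that is the entire
   'a'(ligned) vs 'u'(naligned) distinction between the two instructions. */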
2705
2706
2707/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
2708FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
2709{
2710 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2713 {
2714 /*
2715 * Register, register.
2716 */
2717 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2719
2720 IEM_MC_BEGIN(3, 0);
2721 IEM_MC_ARG(uint64_t *, pDst, 0);
2722 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2723 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2725 IEM_MC_PREPARE_FPU_USAGE();
2726 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2727 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2728 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2729 IEM_MC_ADVANCE_RIP();
2730 IEM_MC_END();
2731 }
2732 else
2733 {
2734 /*
2735 * Register, memory.
2736 */
2737 IEM_MC_BEGIN(3, 2);
2738 IEM_MC_ARG(uint64_t *, pDst, 0);
2739 IEM_MC_LOCAL(uint64_t, uSrc);
2740 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2742
2743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2744 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2748
2749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2750 IEM_MC_PREPARE_FPU_USAGE();
2751 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2752 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2753
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 return VINF_SUCCESS;
2758}
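
/* The immediate selects source words two bits at a time: bits 1:0 pick the
   word copied into result word 0, bits 3:2 into word 1, and so on.  E.g.
   pshufw mm0, mm1, 0x1B (binary 00 01 10 11) reverses the four words of mm1. */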
2759
2760/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
2761FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2762{
2763 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2766 {
2767 /*
2768 * Register, register.
2769 */
2770 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772
2773 IEM_MC_BEGIN(3, 0);
2774 IEM_MC_ARG(uint128_t *, pDst, 0);
2775 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2776 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_PREPARE_SSE_USAGE();
2779 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2780 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2781 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2782 IEM_MC_ADVANCE_RIP();
2783 IEM_MC_END();
2784 }
2785 else
2786 {
2787 /*
2788 * Register, memory.
2789 */
2790 IEM_MC_BEGIN(3, 2);
2791 IEM_MC_ARG(uint128_t *, pDst, 0);
2792 IEM_MC_LOCAL(uint128_t, uSrc);
2793 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2795
2796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2797 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801
2802 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2803 IEM_MC_PREPARE_SSE_USAGE();
2804 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2806
2807 IEM_MC_ADVANCE_RIP();
2808 IEM_MC_END();
2809 }
2810 return VINF_SUCCESS;
2811}
2812
2813/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
2814FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2815{
2816 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2818 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2819 {
2820 /*
2821 * Register, register.
2822 */
2823 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2825
2826 IEM_MC_BEGIN(3, 0);
2827 IEM_MC_ARG(uint128_t *, pDst, 0);
2828 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2829 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2830 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2831 IEM_MC_PREPARE_SSE_USAGE();
2832 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2833 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2835 IEM_MC_ADVANCE_RIP();
2836 IEM_MC_END();
2837 }
2838 else
2839 {
2840 /*
2841 * Register, memory.
2842 */
2843 IEM_MC_BEGIN(3, 2);
2844 IEM_MC_ARG(uint128_t *, pDst, 0);
2845 IEM_MC_LOCAL(uint128_t, uSrc);
2846 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2848
2849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2850 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2853 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2854
2855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2856 IEM_MC_PREPARE_SSE_USAGE();
2857 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2859
2860 IEM_MC_ADVANCE_RIP();
2861 IEM_MC_END();
2862 }
2863 return VINF_SUCCESS;
2864}
2865
2866/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
2867FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2868{
2869 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /*
2874 * Register, register.
2875 */
2876 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878
2879 IEM_MC_BEGIN(3, 0);
2880 IEM_MC_ARG(uint128_t *, pDst, 0);
2881 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2882 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2884 IEM_MC_PREPARE_SSE_USAGE();
2885 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2886 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2887 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2888 IEM_MC_ADVANCE_RIP();
2889 IEM_MC_END();
2890 }
2891 else
2892 {
2893 /*
2894 * Register, memory.
2895 */
2896 IEM_MC_BEGIN(3, 2);
2897 IEM_MC_ARG(uint128_t *, pDst, 0);
2898 IEM_MC_LOCAL(uint128_t, uSrc);
2899 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2901
2902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2903 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2907
2908 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2909 IEM_MC_PREPARE_SSE_USAGE();
2910 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2911 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
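
/* Of the 0x0f 0x70 trio: pshufd shuffles all four dwords, pshufhw shuffles
   only the words of the high qword (the low qword is copied through), and
   pshuflw only the words of the low qword (the high qword is copied through). */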
2918
2919
2920/** Opcode 0x0f 0x71 11/2. */
2921FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2922
2923/** Opcode 0x66 0x0f 0x71 11/2. */
2924FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2925
2926/** Opcode 0x0f 0x71 11/4. */
2927FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2928
2929/** Opcode 0x66 0x0f 0x71 11/4. */
2930FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2931
2932/** Opcode 0x0f 0x71 11/6. */
2933FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2934
2935/** Opcode 0x66 0x0f 0x71 11/6. */
2936FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2937
2938
2939/**
2940 * Group 12 jump table for register variant.
2941 */
2942IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[8*4] =
2943{
2944 /** @todo decode imm8? */
2945 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
2946 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
2947 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2948 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
2949 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2950 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
2951 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2952 /* /7 */ IEMOP_X4(iemOp_InvalidWithRM)
2953};
2954
2955
2956/** Opcode 0x0f 0x71. */
2957FNIEMOP_DEF(iemOp_Grp12)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2961 /* register, register */
2962 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2963 + pVCpu->iem.s.idxPrefix], bRm);
2964 /** @todo decode SIB, disp, Ib? */
2965 return IEMOP_RAISE_INVALID_OPCODE();
2966}
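
/* The *RegReg tables are laid out as eight /reg rows of four prefix columns
   (no prefix, 0x66, 0xf3, 0xf2 -- the same order the per-opcode comments in
   this file follow), hence the 'reg * 4 + idxPrefix' lookup above.  E.g.
   66 0f 71 /2 selects row 2, column 1: iemOp_Grp12_vpsrlw_Hx_Ux_Ib. */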
2967
2968
2969/** Opcode 0x0f 0x72 11/2. */
2970FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2971
2972/** Opcode 0x66 0x0f 0x72 11/2. */
2973FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2974
2975/** Opcode 0x0f 0x72 11/4. */
2976FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2977
2978/** Opcode 0x66 0x0f 0x72 11/4. */
2979FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2980
2981/** Opcode 0x0f 0x72 11/6. */
2982FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x72 11/6. */
2985FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2986
2987
2988/**
2989 * Group 13 jump table for register variant.
2990 */
2991IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[8*4] =
2992{
2993 /** @todo decode imm8? */
2994 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
2995 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
2996 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2997 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
2998 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
2999 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
3000 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3001 /* /7 */ IEMOP_X4(iemOp_InvalidWithRM)
3002};
3003
3004/** Opcode 0x0f 0x72. */
3005FNIEMOP_DEF(iemOp_Grp13)
3006{
3007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3009 /* register, register */
3010 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3011 + pVCpu->iem.s.idxPrefix], bRm);
3012 /** @todo decode SIB, disp, Ib? */
3013 return IEMOP_RAISE_INVALID_OPCODE();
3014}
3015
3016
3017/** Opcode 0x0f 0x73 11/2. */
3018FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3019
3020/** Opcode 0x66 0x0f 0x73 11/2. */
3021FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3022
3023/** Opcode 0x66 0x0f 0x73 11/3. */
3024FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3025
3026/** Opcode 0x0f 0x73 11/6. */
3027FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3028
3029/** Opcode 0x66 0x0f 0x73 11/6. */
3030FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3031
3032/** Opcode 0x66 0x0f 0x73 11/7. */
3033FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3034
3035/**
3036 * Group 14 jump table for register variant.
3037 */
3038IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[8*4] =
3039{
3040 /** @todo decode imm8? */
3041 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
3042 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
3043 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3044 /* /3 */ iemOp_InvalidWithRM, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3045 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
3046 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
3047 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3048 /* /7 */ iemOp_InvalidWithRM, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3049};
3050
3051
3052/** Opcode 0x0f 0x73. */
3053FNIEMOP_DEF(iemOp_Grp14)
3054{
3055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3057 /* register, register */
3058 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3059 + pVCpu->iem.s.idxPrefix], bRm);
3060 /** @todo decode SIB, disp, Ib? */
3061 return IEMOP_RAISE_INVALID_OPCODE();
3062}
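
/* Note: unlike /2 and /6, rows /3 (psrldq) and /7 (pslldq) only have a 0x66
   column -- the byte-wise shifts are SSE2-only and have no MMX form. */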
3063
3064
3065/**
3066 * Common worker for MMX instructions on the form:
3067 * pxxx mm1, mm2/mem64
3068 */
3069FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3070{
3071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3073 {
3074 /*
3075 * Register, register.
3076 */
3077 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3078 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3080 IEM_MC_BEGIN(2, 0);
3081 IEM_MC_ARG(uint64_t *, pDst, 0);
3082 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3083 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3084 IEM_MC_PREPARE_FPU_USAGE();
3085 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3086 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3087 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3088 IEM_MC_ADVANCE_RIP();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /*
3094 * Register, memory.
3095 */
3096 IEM_MC_BEGIN(2, 2);
3097 IEM_MC_ARG(uint64_t *, pDst, 0);
3098 IEM_MC_LOCAL(uint64_t, uSrc);
3099 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3101
3102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3104 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3105 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3106
3107 IEM_MC_PREPARE_FPU_USAGE();
3108 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3109 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3110
3111 IEM_MC_ADVANCE_RIP();
3112 IEM_MC_END();
3113 }
3114 return VINF_SUCCESS;
3115}
3116
3117
3118/**
3119 * Common worker for SSE2 instructions on the forms:
3120 * pxxx xmm1, xmm2/mem128
3121 *
3122 * Proper alignment of the 128-bit operand is enforced.
3123 * Exceptions type 4. SSE2 cpuid checks.
3124 */
3125FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3126{
3127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3129 {
3130 /*
3131 * Register, register.
3132 */
3133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3134 IEM_MC_BEGIN(2, 0);
3135 IEM_MC_ARG(uint128_t *, pDst, 0);
3136 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3137 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3138 IEM_MC_PREPARE_SSE_USAGE();
3139 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3140 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3141 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3142 IEM_MC_ADVANCE_RIP();
3143 IEM_MC_END();
3144 }
3145 else
3146 {
3147 /*
3148 * Register, memory.
3149 */
3150 IEM_MC_BEGIN(2, 2);
3151 IEM_MC_ARG(uint128_t *, pDst, 0);
3152 IEM_MC_LOCAL(uint128_t, uSrc);
3153 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3155
3156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3159 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3160
3161 IEM_MC_PREPARE_SSE_USAGE();
3162 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3163 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3164
3165 IEM_MC_ADVANCE_RIP();
3166 IEM_MC_END();
3167 }
3168 return VINF_SUCCESS;
3169}
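
/* Unlike the MMX worker above, the 128-bit memory operand here must be
   16-byte aligned (IEM_MC_FETCH_MEM_U128_ALIGN_SSE): under the exception
   type 4 rules the legacy SSE forms fault on misaligned 128-bit operands. */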
3170
3171
3172/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3173FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3174{
3175 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3176 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3177}
3178
3179/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3180FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3181{
3182 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3183 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3184}
3185
3186/* Opcode 0xf3 0x0f 0x74 - invalid */
3187/* Opcode 0xf2 0x0f 0x74 - invalid */
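
/* Worked example: pcmpeqb writes 0xff into every destination byte that equals
   the corresponding source byte and 0x00 into the rest, so 'pcmpeqb xmm0,xmm0'
   is the idiomatic way of setting all 128 bits. */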
3188
3189
3190/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3191FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3192{
3193 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3194 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3195}
3196
3197/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3198FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3199{
3200 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3201 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3202}
3203
3204/* Opcode 0xf3 0x0f 0x75 - invalid */
3205/* Opcode 0xf2 0x0f 0x75 - invalid */
3206
3207
3208/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3209FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3210{
3211 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3212 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3213}
3214
3215/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3216FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3217{
3218 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3219 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3220}
3221
3222/* Opcode 0xf3 0x0f 0x76 - invalid */
3223/* Opcode 0xf2 0x0f 0x76 - invalid */
3224
3225
3226/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3227FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3228/* Opcode 0x66 0x0f 0x77 - invalid */
3229/* Opcode 0xf3 0x0f 0x77 - invalid */
3230/* Opcode 0xf2 0x0f 0x77 - invalid */
3231
3232/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3233FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3234/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3235FNIEMOP_STUB(iemOp_AmdGrp17);
3236/* Opcode 0xf3 0x0f 0x78 - invalid */
3237/* Opcode 0xf2 0x0f 0x78 - invalid */
3238
3239/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3240FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3241/* Opcode 0x66 0x0f 0x79 - invalid */
3242/* Opcode 0xf3 0x0f 0x79 - invalid */
3243/* Opcode 0xf2 0x0f 0x79 - invalid */
3244
3245/* Opcode 0x0f 0x7a - invalid */
3246/* Opcode 0x66 0x0f 0x7a - invalid */
3247/* Opcode 0xf3 0x0f 0x7a - invalid */
3248/* Opcode 0xf2 0x0f 0x7a - invalid */
3249
3250/* Opcode 0x0f 0x7b - invalid */
3251/* Opcode 0x66 0x0f 0x7b - invalid */
3252/* Opcode 0xf3 0x0f 0x7b - invalid */
3253/* Opcode 0xf2 0x0f 0x7b - invalid */
3254
3255/* Opcode 0x0f 0x7c - invalid */
3256/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3257FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3258/* Opcode 0xf3 0x0f 0x7c - invalid */
3259/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3260FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3261
3262/* Opcode 0x0f 0x7d - invalid */
3263/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3264FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3265/* Opcode 0xf3 0x0f 0x7d - invalid */
3266/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3267FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3268
3269
3270/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3271FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3272{
3273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3275 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3276 else
3277 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3279 {
3280 /* greg, MMX */
3281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3282 IEM_MC_BEGIN(0, 1);
3283 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3284 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3285 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3286 {
3287 IEM_MC_LOCAL(uint64_t, u64Tmp);
3288 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3289 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3290 }
3291 else
3292 {
3293 IEM_MC_LOCAL(uint32_t, u32Tmp);
3294 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3295 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3296 }
3297 IEM_MC_ADVANCE_RIP();
3298 IEM_MC_END();
3299 }
3300 else
3301 {
3302 /* [mem], MMX */
3303 IEM_MC_BEGIN(0, 2);
3304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3305 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3308 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3310 {
3311 IEM_MC_LOCAL(uint64_t, u64Tmp);
3312 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3313 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3314 }
3315 else
3316 {
3317 IEM_MC_LOCAL(uint32_t, u32Tmp);
3318 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3319 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3320 }
3321 IEM_MC_ADVANCE_RIP();
3322 IEM_MC_END();
3323 }
3324 return VINF_SUCCESS;
3325}
3326
3327/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3328FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3329{
3330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3331 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3332        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
3333    else
3334        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
3335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3336 {
3337 /* greg, XMM */
3338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3339 IEM_MC_BEGIN(0, 1);
3340 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3342 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3343 {
3344 IEM_MC_LOCAL(uint64_t, u64Tmp);
3345 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3347 }
3348 else
3349 {
3350 IEM_MC_LOCAL(uint32_t, u32Tmp);
3351 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3352 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3353 }
3354 IEM_MC_ADVANCE_RIP();
3355 IEM_MC_END();
3356 }
3357 else
3358 {
3359 /* [mem], XMM */
3360 IEM_MC_BEGIN(0, 2);
3361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3362 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3365 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3366 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3367 {
3368 IEM_MC_LOCAL(uint64_t, u64Tmp);
3369 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3370 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3371 }
3372 else
3373 {
3374 IEM_MC_LOCAL(uint32_t, u32Tmp);
3375 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3376 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3377 }
3378 IEM_MC_ADVANCE_RIP();
3379 IEM_MC_END();
3380 }
3381 return VINF_SUCCESS;
3382}
3383
3384/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3385FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3386/* Opcode 0xf2 0x0f 0x7e - invalid */
3387
3388
3389/** Opcode 0x0f 0x7f - movq Qq, Pq */
3390FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3391{
3392 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3395 {
3396 /*
3397 * Register, register.
3398 */
3399 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3400 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3402 IEM_MC_BEGIN(0, 1);
3403 IEM_MC_LOCAL(uint64_t, u64Tmp);
3404 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3405 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3406 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3407 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3408 IEM_MC_ADVANCE_RIP();
3409 IEM_MC_END();
3410 }
3411 else
3412 {
3413 /*
3414 * Register, memory.
3415 */
3416 IEM_MC_BEGIN(0, 2);
3417 IEM_MC_LOCAL(uint64_t, u64Tmp);
3418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3419
3420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3422 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3423 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3424
3425 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3426 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3427
3428 IEM_MC_ADVANCE_RIP();
3429 IEM_MC_END();
3430 }
3431 return VINF_SUCCESS;
3432}
3433
3434/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3435FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3436{
3437 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3440 {
3441 /*
3442 * Register, register.
3443 */
3444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3445 IEM_MC_BEGIN(0, 0);
3446 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3449 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3450 IEM_MC_ADVANCE_RIP();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 /*
3456 * Register, memory.
3457 */
3458 IEM_MC_BEGIN(0, 2);
3459 IEM_MC_LOCAL(uint128_t, u128Tmp);
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3464 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3466
3467 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3468 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3469
3470 IEM_MC_ADVANCE_RIP();
3471 IEM_MC_END();
3472 }
3473 return VINF_SUCCESS;
3474}
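
/* movdqa requires a 16-byte aligned memory operand, hence the
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE above, which raises \#GP(0) on a misaligned
 * address; contrast this with the movdqu handler below, which uses the plain
 * unaligned IEM_MC_STORE_MEM_U128.
 */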
3475
3476/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3477FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3478{
3479    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3480    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3482 {
3483 /*
3484 * Register, register.
3485 */
3486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3487 IEM_MC_BEGIN(0, 0);
3488 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3489 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3490 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3491 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3492 IEM_MC_ADVANCE_RIP();
3493 IEM_MC_END();
3494 }
3495 else
3496 {
3497 /*
3498 * Register, memory.
3499 */
3500 IEM_MC_BEGIN(0, 2);
3501 IEM_MC_LOCAL(uint128_t, u128Tmp);
3502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3503
3504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3506 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3508
3509 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3510 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3511
3512 IEM_MC_ADVANCE_RIP();
3513 IEM_MC_END();
3514 }
3515 return VINF_SUCCESS;
3516}
3517
3518/* Opcode 0xf2 0x0f 0x7f - invalid */
3519
3520
3521
3522/** Opcode 0x0f 0x80. */
3523FNIEMOP_DEF(iemOp_jo_Jv)
3524{
3525 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3526 IEMOP_HLP_MIN_386();
3527 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3528 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3529 {
3530 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3532
3533 IEM_MC_BEGIN(0, 0);
3534 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3535 IEM_MC_REL_JMP_S16(i16Imm);
3536 } IEM_MC_ELSE() {
3537 IEM_MC_ADVANCE_RIP();
3538 } IEM_MC_ENDIF();
3539 IEM_MC_END();
3540 }
3541 else
3542 {
3543 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3545
3546 IEM_MC_BEGIN(0, 0);
3547 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3548 IEM_MC_REL_JMP_S32(i32Imm);
3549 } IEM_MC_ELSE() {
3550 IEM_MC_ADVANCE_RIP();
3551 } IEM_MC_ENDIF();
3552 IEM_MC_END();
3553 }
3554 return VINF_SUCCESS;
3555}
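
/* All sixteen Jcc handlers (0x0f 0x80 thru 0x8f) follow the shape above: a
 * 16-bit effective operand size consumes a rel16 and truncates the result to
 * IP, anything wider consumes a rel32, and IEMOP_HLP_DEFAULT_64BIT_OP_SIZE
 * makes 64-bit mode take the rel32 path with sign-extension to RIP.
 * Condition-to-flags mapping for the family:
 *      jo/jno    OF                js/jns     SF
 *      jc/jnc    CF                jp/jnp     PF
 *      je/jne    ZF                jl/jnl     SF != OF
 *      jbe/jnbe  CF | ZF           jle/jnle   ZF | (SF != OF)
 */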
3556
3557
3558/** Opcode 0x0f 0x81. */
3559FNIEMOP_DEF(iemOp_jno_Jv)
3560{
3561 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3562 IEMOP_HLP_MIN_386();
3563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3564 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3565 {
3566 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3568
3569 IEM_MC_BEGIN(0, 0);
3570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3571 IEM_MC_ADVANCE_RIP();
3572 } IEM_MC_ELSE() {
3573 IEM_MC_REL_JMP_S16(i16Imm);
3574 } IEM_MC_ENDIF();
3575 IEM_MC_END();
3576 }
3577 else
3578 {
3579 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581
3582 IEM_MC_BEGIN(0, 0);
3583 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3584 IEM_MC_ADVANCE_RIP();
3585 } IEM_MC_ELSE() {
3586 IEM_MC_REL_JMP_S32(i32Imm);
3587 } IEM_MC_ENDIF();
3588 IEM_MC_END();
3589 }
3590 return VINF_SUCCESS;
3591}
3592
3593
3594/** Opcode 0x0f 0x82. */
3595FNIEMOP_DEF(iemOp_jc_Jv)
3596{
3597 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3598 IEMOP_HLP_MIN_386();
3599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3600 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3601 {
3602 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3604
3605 IEM_MC_BEGIN(0, 0);
3606 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3607 IEM_MC_REL_JMP_S16(i16Imm);
3608 } IEM_MC_ELSE() {
3609 IEM_MC_ADVANCE_RIP();
3610 } IEM_MC_ENDIF();
3611 IEM_MC_END();
3612 }
3613 else
3614 {
3615 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3617
3618 IEM_MC_BEGIN(0, 0);
3619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3620 IEM_MC_REL_JMP_S32(i32Imm);
3621 } IEM_MC_ELSE() {
3622 IEM_MC_ADVANCE_RIP();
3623 } IEM_MC_ENDIF();
3624 IEM_MC_END();
3625 }
3626 return VINF_SUCCESS;
3627}
3628
3629
3630/** Opcode 0x0f 0x83. */
3631FNIEMOP_DEF(iemOp_jnc_Jv)
3632{
3633 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3634 IEMOP_HLP_MIN_386();
3635 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3636 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3637 {
3638 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3640
3641 IEM_MC_BEGIN(0, 0);
3642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3643 IEM_MC_ADVANCE_RIP();
3644 } IEM_MC_ELSE() {
3645 IEM_MC_REL_JMP_S16(i16Imm);
3646 } IEM_MC_ENDIF();
3647 IEM_MC_END();
3648 }
3649 else
3650 {
3651 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3653
3654 IEM_MC_BEGIN(0, 0);
3655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3656 IEM_MC_ADVANCE_RIP();
3657 } IEM_MC_ELSE() {
3658 IEM_MC_REL_JMP_S32(i32Imm);
3659 } IEM_MC_ENDIF();
3660 IEM_MC_END();
3661 }
3662 return VINF_SUCCESS;
3663}
3664
3665
3666/** Opcode 0x0f 0x84. */
3667FNIEMOP_DEF(iemOp_je_Jv)
3668{
3669 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3670 IEMOP_HLP_MIN_386();
3671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3672 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3673 {
3674 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676
3677 IEM_MC_BEGIN(0, 0);
3678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3679 IEM_MC_REL_JMP_S16(i16Imm);
3680 } IEM_MC_ELSE() {
3681 IEM_MC_ADVANCE_RIP();
3682 } IEM_MC_ENDIF();
3683 IEM_MC_END();
3684 }
3685 else
3686 {
3687 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3689
3690 IEM_MC_BEGIN(0, 0);
3691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3692 IEM_MC_REL_JMP_S32(i32Imm);
3693 } IEM_MC_ELSE() {
3694 IEM_MC_ADVANCE_RIP();
3695 } IEM_MC_ENDIF();
3696 IEM_MC_END();
3697 }
3698 return VINF_SUCCESS;
3699}
3700
3701
3702/** Opcode 0x0f 0x85. */
3703FNIEMOP_DEF(iemOp_jne_Jv)
3704{
3705 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3706 IEMOP_HLP_MIN_386();
3707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3708 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3709 {
3710 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3712
3713 IEM_MC_BEGIN(0, 0);
3714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3715 IEM_MC_ADVANCE_RIP();
3716 } IEM_MC_ELSE() {
3717 IEM_MC_REL_JMP_S16(i16Imm);
3718 } IEM_MC_ENDIF();
3719 IEM_MC_END();
3720 }
3721 else
3722 {
3723 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3725
3726 IEM_MC_BEGIN(0, 0);
3727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3728 IEM_MC_ADVANCE_RIP();
3729 } IEM_MC_ELSE() {
3730 IEM_MC_REL_JMP_S32(i32Imm);
3731 } IEM_MC_ENDIF();
3732 IEM_MC_END();
3733 }
3734 return VINF_SUCCESS;
3735}
3736
3737
3738/** Opcode 0x0f 0x86. */
3739FNIEMOP_DEF(iemOp_jbe_Jv)
3740{
3741 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3742 IEMOP_HLP_MIN_386();
3743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3744 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3745 {
3746 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3748
3749 IEM_MC_BEGIN(0, 0);
3750 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3751 IEM_MC_REL_JMP_S16(i16Imm);
3752 } IEM_MC_ELSE() {
3753 IEM_MC_ADVANCE_RIP();
3754 } IEM_MC_ENDIF();
3755 IEM_MC_END();
3756 }
3757 else
3758 {
3759 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761
3762 IEM_MC_BEGIN(0, 0);
3763 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3764 IEM_MC_REL_JMP_S32(i32Imm);
3765 } IEM_MC_ELSE() {
3766 IEM_MC_ADVANCE_RIP();
3767 } IEM_MC_ENDIF();
3768 IEM_MC_END();
3769 }
3770 return VINF_SUCCESS;
3771}
3772
3773
3774/** Opcode 0x0f 0x87. */
3775FNIEMOP_DEF(iemOp_jnbe_Jv)
3776{
3777 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3778 IEMOP_HLP_MIN_386();
3779 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3780 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3781 {
3782 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784
3785 IEM_MC_BEGIN(0, 0);
3786 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3787 IEM_MC_ADVANCE_RIP();
3788 } IEM_MC_ELSE() {
3789 IEM_MC_REL_JMP_S16(i16Imm);
3790 } IEM_MC_ENDIF();
3791 IEM_MC_END();
3792 }
3793 else
3794 {
3795 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3797
3798 IEM_MC_BEGIN(0, 0);
3799 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3800 IEM_MC_ADVANCE_RIP();
3801 } IEM_MC_ELSE() {
3802 IEM_MC_REL_JMP_S32(i32Imm);
3803 } IEM_MC_ENDIF();
3804 IEM_MC_END();
3805 }
3806 return VINF_SUCCESS;
3807}
3808
3809
3810/** Opcode 0x0f 0x88. */
3811FNIEMOP_DEF(iemOp_js_Jv)
3812{
3813 IEMOP_MNEMONIC(js_Jv, "js Jv");
3814 IEMOP_HLP_MIN_386();
3815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3816 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3817 {
3818 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820
3821 IEM_MC_BEGIN(0, 0);
3822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3823 IEM_MC_REL_JMP_S16(i16Imm);
3824 } IEM_MC_ELSE() {
3825 IEM_MC_ADVANCE_RIP();
3826 } IEM_MC_ENDIF();
3827 IEM_MC_END();
3828 }
3829 else
3830 {
3831 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3833
3834 IEM_MC_BEGIN(0, 0);
3835 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3836 IEM_MC_REL_JMP_S32(i32Imm);
3837 } IEM_MC_ELSE() {
3838 IEM_MC_ADVANCE_RIP();
3839 } IEM_MC_ENDIF();
3840 IEM_MC_END();
3841 }
3842 return VINF_SUCCESS;
3843}
3844
3845
3846/** Opcode 0x0f 0x89. */
3847FNIEMOP_DEF(iemOp_jns_Jv)
3848{
3849 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3850 IEMOP_HLP_MIN_386();
3851 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3852 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3853 {
3854 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856
3857 IEM_MC_BEGIN(0, 0);
3858 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3859 IEM_MC_ADVANCE_RIP();
3860 } IEM_MC_ELSE() {
3861 IEM_MC_REL_JMP_S16(i16Imm);
3862 } IEM_MC_ENDIF();
3863 IEM_MC_END();
3864 }
3865 else
3866 {
3867 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3869
3870 IEM_MC_BEGIN(0, 0);
3871 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3872 IEM_MC_ADVANCE_RIP();
3873 } IEM_MC_ELSE() {
3874 IEM_MC_REL_JMP_S32(i32Imm);
3875 } IEM_MC_ENDIF();
3876 IEM_MC_END();
3877 }
3878 return VINF_SUCCESS;
3879}
3880
3881
3882/** Opcode 0x0f 0x8a. */
3883FNIEMOP_DEF(iemOp_jp_Jv)
3884{
3885 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3886 IEMOP_HLP_MIN_386();
3887 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3888 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3889 {
3890 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3892
3893 IEM_MC_BEGIN(0, 0);
3894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3895 IEM_MC_REL_JMP_S16(i16Imm);
3896 } IEM_MC_ELSE() {
3897 IEM_MC_ADVANCE_RIP();
3898 } IEM_MC_ENDIF();
3899 IEM_MC_END();
3900 }
3901 else
3902 {
3903 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3905
3906 IEM_MC_BEGIN(0, 0);
3907 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3908 IEM_MC_REL_JMP_S32(i32Imm);
3909 } IEM_MC_ELSE() {
3910 IEM_MC_ADVANCE_RIP();
3911 } IEM_MC_ENDIF();
3912 IEM_MC_END();
3913 }
3914 return VINF_SUCCESS;
3915}
3916
3917
3918/** Opcode 0x0f 0x8b. */
3919FNIEMOP_DEF(iemOp_jnp_Jv)
3920{
3921 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3922 IEMOP_HLP_MIN_386();
3923 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3924 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3925 {
3926 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928
3929 IEM_MC_BEGIN(0, 0);
3930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3931 IEM_MC_ADVANCE_RIP();
3932 } IEM_MC_ELSE() {
3933 IEM_MC_REL_JMP_S16(i16Imm);
3934 } IEM_MC_ENDIF();
3935 IEM_MC_END();
3936 }
3937 else
3938 {
3939 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3941
3942 IEM_MC_BEGIN(0, 0);
3943 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3944 IEM_MC_ADVANCE_RIP();
3945 } IEM_MC_ELSE() {
3946 IEM_MC_REL_JMP_S32(i32Imm);
3947 } IEM_MC_ENDIF();
3948 IEM_MC_END();
3949 }
3950 return VINF_SUCCESS;
3951}
3952
3953
3954/** Opcode 0x0f 0x8c. */
3955FNIEMOP_DEF(iemOp_jl_Jv)
3956{
3957 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3958 IEMOP_HLP_MIN_386();
3959 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3960 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3961 {
3962 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3964
3965 IEM_MC_BEGIN(0, 0);
3966 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3967 IEM_MC_REL_JMP_S16(i16Imm);
3968 } IEM_MC_ELSE() {
3969 IEM_MC_ADVANCE_RIP();
3970 } IEM_MC_ENDIF();
3971 IEM_MC_END();
3972 }
3973 else
3974 {
3975 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977
3978 IEM_MC_BEGIN(0, 0);
3979 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3980 IEM_MC_REL_JMP_S32(i32Imm);
3981 } IEM_MC_ELSE() {
3982 IEM_MC_ADVANCE_RIP();
3983 } IEM_MC_ENDIF();
3984 IEM_MC_END();
3985 }
3986 return VINF_SUCCESS;
3987}
3988
3989
3990/** Opcode 0x0f 0x8d. */
3991FNIEMOP_DEF(iemOp_jnl_Jv)
3992{
3993 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3994 IEMOP_HLP_MIN_386();
3995 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3996 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3997 {
3998 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4000
4001 IEM_MC_BEGIN(0, 0);
4002 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4003 IEM_MC_ADVANCE_RIP();
4004 } IEM_MC_ELSE() {
4005 IEM_MC_REL_JMP_S16(i16Imm);
4006 } IEM_MC_ENDIF();
4007 IEM_MC_END();
4008 }
4009 else
4010 {
4011 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4013
4014 IEM_MC_BEGIN(0, 0);
4015 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4016 IEM_MC_ADVANCE_RIP();
4017 } IEM_MC_ELSE() {
4018 IEM_MC_REL_JMP_S32(i32Imm);
4019 } IEM_MC_ENDIF();
4020 IEM_MC_END();
4021 }
4022 return VINF_SUCCESS;
4023}
4024
4025
4026/** Opcode 0x0f 0x8e. */
4027FNIEMOP_DEF(iemOp_jle_Jv)
4028{
4029 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4030 IEMOP_HLP_MIN_386();
4031 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4032 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4033 {
4034 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4036
4037 IEM_MC_BEGIN(0, 0);
4038 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4039 IEM_MC_REL_JMP_S16(i16Imm);
4040 } IEM_MC_ELSE() {
4041 IEM_MC_ADVANCE_RIP();
4042 } IEM_MC_ENDIF();
4043 IEM_MC_END();
4044 }
4045 else
4046 {
4047 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4049
4050 IEM_MC_BEGIN(0, 0);
4051 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4052 IEM_MC_REL_JMP_S32(i32Imm);
4053 } IEM_MC_ELSE() {
4054 IEM_MC_ADVANCE_RIP();
4055 } IEM_MC_ENDIF();
4056 IEM_MC_END();
4057 }
4058 return VINF_SUCCESS;
4059}
4060
4061
4062/** Opcode 0x0f 0x8f. */
4063FNIEMOP_DEF(iemOp_jnle_Jv)
4064{
4065 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4066 IEMOP_HLP_MIN_386();
4067 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4068 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4069 {
4070 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4072
4073 IEM_MC_BEGIN(0, 0);
4074 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4075 IEM_MC_ADVANCE_RIP();
4076 } IEM_MC_ELSE() {
4077 IEM_MC_REL_JMP_S16(i16Imm);
4078 } IEM_MC_ENDIF();
4079 IEM_MC_END();
4080 }
4081 else
4082 {
4083 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4085
4086 IEM_MC_BEGIN(0, 0);
4087 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4088 IEM_MC_ADVANCE_RIP();
4089 } IEM_MC_ELSE() {
4090 IEM_MC_REL_JMP_S32(i32Imm);
4091 } IEM_MC_ENDIF();
4092 IEM_MC_END();
4093 }
4094 return VINF_SUCCESS;
4095}
4096
4097
4098/** Opcode 0x0f 0x90. */
4099FNIEMOP_DEF(iemOp_seto_Eb)
4100{
4101 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4102 IEMOP_HLP_MIN_386();
4103 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4104
4105 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4106 * any way. AMD says it's "unused", whatever that means. We're
4107 * ignoring for now. */
4108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4109 {
4110 /* register target */
4111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4112 IEM_MC_BEGIN(0, 0);
4113 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4114 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4115 } IEM_MC_ELSE() {
4116 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4117 } IEM_MC_ENDIF();
4118 IEM_MC_ADVANCE_RIP();
4119 IEM_MC_END();
4120 }
4121 else
4122 {
4123 /* memory target */
4124 IEM_MC_BEGIN(0, 1);
4125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4128 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4129 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4130 } IEM_MC_ELSE() {
4131 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4132 } IEM_MC_ENDIF();
4133 IEM_MC_ADVANCE_RIP();
4134 IEM_MC_END();
4135 }
4136 return VINF_SUCCESS;
4137}
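
/* The fifteen SETcc handlers that follow use the same two paths as seto: test
 * the condition (same flag combinations as the Jcc family above), then store
 * the constant 1 or 0 into the byte register (REX.B extended) or the byte at
 * the effective address.  Only the sense of the condition varies.
 */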
4138
4139
4140/** Opcode 0x0f 0x91. */
4141FNIEMOP_DEF(iemOp_setno_Eb)
4142{
4143 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4144 IEMOP_HLP_MIN_386();
4145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4146
4147 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4148 * any way. AMD says it's "unused", whatever that means. We're
4149 * ignoring for now. */
4150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4151 {
4152 /* register target */
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154 IEM_MC_BEGIN(0, 0);
4155 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4156 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4157 } IEM_MC_ELSE() {
4158 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4159 } IEM_MC_ENDIF();
4160 IEM_MC_ADVANCE_RIP();
4161 IEM_MC_END();
4162 }
4163 else
4164 {
4165 /* memory target */
4166 IEM_MC_BEGIN(0, 1);
4167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4170 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4171 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4172 } IEM_MC_ELSE() {
4173 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4174 } IEM_MC_ENDIF();
4175 IEM_MC_ADVANCE_RIP();
4176 IEM_MC_END();
4177 }
4178 return VINF_SUCCESS;
4179}
4180
4181
4182/** Opcode 0x0f 0x92. */
4183FNIEMOP_DEF(iemOp_setc_Eb)
4184{
4185 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4186 IEMOP_HLP_MIN_386();
4187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4188
4189 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4190 * any way. AMD says it's "unused", whatever that means. We're
4191 * ignoring for now. */
4192 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4193 {
4194 /* register target */
4195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4196 IEM_MC_BEGIN(0, 0);
4197 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4198 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4199 } IEM_MC_ELSE() {
4200 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4201 } IEM_MC_ENDIF();
4202 IEM_MC_ADVANCE_RIP();
4203 IEM_MC_END();
4204 }
4205 else
4206 {
4207 /* memory target */
4208 IEM_MC_BEGIN(0, 1);
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4212 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4213 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4214 } IEM_MC_ELSE() {
4215 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4216 } IEM_MC_ENDIF();
4217 IEM_MC_ADVANCE_RIP();
4218 IEM_MC_END();
4219 }
4220 return VINF_SUCCESS;
4221}
4222
4223
4224/** Opcode 0x0f 0x93. */
4225FNIEMOP_DEF(iemOp_setnc_Eb)
4226{
4227 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4228 IEMOP_HLP_MIN_386();
4229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4230
4231 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4232 * any way. AMD says it's "unused", whatever that means. We're
4233 * ignoring for now. */
4234 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4235 {
4236 /* register target */
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4238 IEM_MC_BEGIN(0, 0);
4239 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4240 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4241 } IEM_MC_ELSE() {
4242 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4243 } IEM_MC_ENDIF();
4244 IEM_MC_ADVANCE_RIP();
4245 IEM_MC_END();
4246 }
4247 else
4248 {
4249 /* memory target */
4250 IEM_MC_BEGIN(0, 1);
4251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4255 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4256 } IEM_MC_ELSE() {
4257 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4258 } IEM_MC_ENDIF();
4259 IEM_MC_ADVANCE_RIP();
4260 IEM_MC_END();
4261 }
4262 return VINF_SUCCESS;
4263}
4264
4265
4266/** Opcode 0x0f 0x94. */
4267FNIEMOP_DEF(iemOp_sete_Eb)
4268{
4269 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4270 IEMOP_HLP_MIN_386();
4271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4272
4273 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4274 * any way. AMD says it's "unused", whatever that means. We're
4275 * ignoring for now. */
4276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4277 {
4278 /* register target */
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4280 IEM_MC_BEGIN(0, 0);
4281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4282 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4283 } IEM_MC_ELSE() {
4284 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4285 } IEM_MC_ENDIF();
4286 IEM_MC_ADVANCE_RIP();
4287 IEM_MC_END();
4288 }
4289 else
4290 {
4291 /* memory target */
4292 IEM_MC_BEGIN(0, 1);
4293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4297 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4298 } IEM_MC_ELSE() {
4299 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4300 } IEM_MC_ENDIF();
4301 IEM_MC_ADVANCE_RIP();
4302 IEM_MC_END();
4303 }
4304 return VINF_SUCCESS;
4305}
4306
4307
4308/** Opcode 0x0f 0x95. */
4309FNIEMOP_DEF(iemOp_setne_Eb)
4310{
4311 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4312 IEMOP_HLP_MIN_386();
4313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4314
4315 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4316 * any way. AMD says it's "unused", whatever that means. We're
4317 * ignoring for now. */
4318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4319 {
4320 /* register target */
4321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4322 IEM_MC_BEGIN(0, 0);
4323 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4324 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4325 } IEM_MC_ELSE() {
4326 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4327 } IEM_MC_ENDIF();
4328 IEM_MC_ADVANCE_RIP();
4329 IEM_MC_END();
4330 }
4331 else
4332 {
4333 /* memory target */
4334 IEM_MC_BEGIN(0, 1);
4335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4338 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4339 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4340 } IEM_MC_ELSE() {
4341 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4342 } IEM_MC_ENDIF();
4343 IEM_MC_ADVANCE_RIP();
4344 IEM_MC_END();
4345 }
4346 return VINF_SUCCESS;
4347}
4348
4349
4350/** Opcode 0x0f 0x96. */
4351FNIEMOP_DEF(iemOp_setbe_Eb)
4352{
4353 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4354 IEMOP_HLP_MIN_386();
4355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4356
4357 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4358 * any way. AMD says it's "unused", whatever that means. We're
4359 * ignoring for now. */
4360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4361 {
4362 /* register target */
4363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4364 IEM_MC_BEGIN(0, 0);
4365 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4366 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4367 } IEM_MC_ELSE() {
4368 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4369 } IEM_MC_ENDIF();
4370 IEM_MC_ADVANCE_RIP();
4371 IEM_MC_END();
4372 }
4373 else
4374 {
4375 /* memory target */
4376 IEM_MC_BEGIN(0, 1);
4377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4380 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4381 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4382 } IEM_MC_ELSE() {
4383 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4384 } IEM_MC_ENDIF();
4385 IEM_MC_ADVANCE_RIP();
4386 IEM_MC_END();
4387 }
4388 return VINF_SUCCESS;
4389}
4390
4391
4392/** Opcode 0x0f 0x97. */
4393FNIEMOP_DEF(iemOp_setnbe_Eb)
4394{
4395 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4396 IEMOP_HLP_MIN_386();
4397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4398
4399 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4400 * any way. AMD says it's "unused", whatever that means. We're
4401 * ignoring for now. */
4402 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4403 {
4404 /* register target */
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406 IEM_MC_BEGIN(0, 0);
4407 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4408 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4409 } IEM_MC_ELSE() {
4410 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4411 } IEM_MC_ENDIF();
4412 IEM_MC_ADVANCE_RIP();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 /* memory target */
4418 IEM_MC_BEGIN(0, 1);
4419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4422 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4423 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4424 } IEM_MC_ELSE() {
4425 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4426 } IEM_MC_ENDIF();
4427 IEM_MC_ADVANCE_RIP();
4428 IEM_MC_END();
4429 }
4430 return VINF_SUCCESS;
4431}
4432
4433
4434/** Opcode 0x0f 0x98. */
4435FNIEMOP_DEF(iemOp_sets_Eb)
4436{
4437 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4438 IEMOP_HLP_MIN_386();
4439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4440
4441 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4442 * any way. AMD says it's "unused", whatever that means. We're
4443 * ignoring for now. */
4444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4445 {
4446 /* register target */
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4448 IEM_MC_BEGIN(0, 0);
4449 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4450 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4451 } IEM_MC_ELSE() {
4452 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4453 } IEM_MC_ENDIF();
4454 IEM_MC_ADVANCE_RIP();
4455 IEM_MC_END();
4456 }
4457 else
4458 {
4459 /* memory target */
4460 IEM_MC_BEGIN(0, 1);
4461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4464 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4465 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4466 } IEM_MC_ELSE() {
4467 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4468 } IEM_MC_ENDIF();
4469 IEM_MC_ADVANCE_RIP();
4470 IEM_MC_END();
4471 }
4472 return VINF_SUCCESS;
4473}
4474
4475
4476/** Opcode 0x0f 0x99. */
4477FNIEMOP_DEF(iemOp_setns_Eb)
4478{
4479 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4480 IEMOP_HLP_MIN_386();
4481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4482
4483 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4484 * any way. AMD says it's "unused", whatever that means. We're
4485 * ignoring for now. */
4486 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4487 {
4488 /* register target */
4489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4490 IEM_MC_BEGIN(0, 0);
4491 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4492 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4493 } IEM_MC_ELSE() {
4494 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4495 } IEM_MC_ENDIF();
4496 IEM_MC_ADVANCE_RIP();
4497 IEM_MC_END();
4498 }
4499 else
4500 {
4501 /* memory target */
4502 IEM_MC_BEGIN(0, 1);
4503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4507 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4508 } IEM_MC_ELSE() {
4509 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4510 } IEM_MC_ENDIF();
4511 IEM_MC_ADVANCE_RIP();
4512 IEM_MC_END();
4513 }
4514 return VINF_SUCCESS;
4515}
4516
4517
4518/** Opcode 0x0f 0x9a. */
4519FNIEMOP_DEF(iemOp_setp_Eb)
4520{
4521 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4522 IEMOP_HLP_MIN_386();
4523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4524
4525 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4526 * any way. AMD says it's "unused", whatever that means. We're
4527 * ignoring for now. */
4528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4529 {
4530 /* register target */
4531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4532 IEM_MC_BEGIN(0, 0);
4533 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4534 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4535 } IEM_MC_ELSE() {
4536 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4537 } IEM_MC_ENDIF();
4538 IEM_MC_ADVANCE_RIP();
4539 IEM_MC_END();
4540 }
4541 else
4542 {
4543 /* memory target */
4544 IEM_MC_BEGIN(0, 1);
4545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4548 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4549 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4550 } IEM_MC_ELSE() {
4551 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4552 } IEM_MC_ENDIF();
4553 IEM_MC_ADVANCE_RIP();
4554 IEM_MC_END();
4555 }
4556 return VINF_SUCCESS;
4557}
4558
4559
4560/** Opcode 0x0f 0x9b. */
4561FNIEMOP_DEF(iemOp_setnp_Eb)
4562{
4563 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4564 IEMOP_HLP_MIN_386();
4565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4566
4567 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4568 * any way. AMD says it's "unused", whatever that means. We're
4569 * ignoring for now. */
4570 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4571 {
4572 /* register target */
4573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4574 IEM_MC_BEGIN(0, 0);
4575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4576 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4577 } IEM_MC_ELSE() {
4578 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4579 } IEM_MC_ENDIF();
4580 IEM_MC_ADVANCE_RIP();
4581 IEM_MC_END();
4582 }
4583 else
4584 {
4585 /* memory target */
4586 IEM_MC_BEGIN(0, 1);
4587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4591 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4592 } IEM_MC_ELSE() {
4593 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4594 } IEM_MC_ENDIF();
4595 IEM_MC_ADVANCE_RIP();
4596 IEM_MC_END();
4597 }
4598 return VINF_SUCCESS;
4599}
4600
4601
4602/** Opcode 0x0f 0x9c. */
4603FNIEMOP_DEF(iemOp_setl_Eb)
4604{
4605 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4606 IEMOP_HLP_MIN_386();
4607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4608
4609 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4610 * any way. AMD says it's "unused", whatever that means. We're
4611 * ignoring for now. */
4612 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4613 {
4614 /* register target */
4615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4616 IEM_MC_BEGIN(0, 0);
4617 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4618 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4619 } IEM_MC_ELSE() {
4620 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4621 } IEM_MC_ENDIF();
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 }
4625 else
4626 {
4627 /* memory target */
4628 IEM_MC_BEGIN(0, 1);
4629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4634 } IEM_MC_ELSE() {
4635 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4636 } IEM_MC_ENDIF();
4637 IEM_MC_ADVANCE_RIP();
4638 IEM_MC_END();
4639 }
4640 return VINF_SUCCESS;
4641}
4642
4643
4644/** Opcode 0x0f 0x9d. */
4645FNIEMOP_DEF(iemOp_setnl_Eb)
4646{
4647 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4648 IEMOP_HLP_MIN_386();
4649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4650
4651 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4652 * any way. AMD says it's "unused", whatever that means. We're
4653 * ignoring for now. */
4654 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4655 {
4656 /* register target */
4657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4658 IEM_MC_BEGIN(0, 0);
4659 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4660 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4661 } IEM_MC_ELSE() {
4662 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4663 } IEM_MC_ENDIF();
4664 IEM_MC_ADVANCE_RIP();
4665 IEM_MC_END();
4666 }
4667 else
4668 {
4669 /* memory target */
4670 IEM_MC_BEGIN(0, 1);
4671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4675 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4676 } IEM_MC_ELSE() {
4677 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4678 } IEM_MC_ENDIF();
4679 IEM_MC_ADVANCE_RIP();
4680 IEM_MC_END();
4681 }
4682 return VINF_SUCCESS;
4683}
4684
4685
4686/** Opcode 0x0f 0x9e. */
4687FNIEMOP_DEF(iemOp_setle_Eb)
4688{
4689 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4690 IEMOP_HLP_MIN_386();
4691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4692
4693 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4694 * any way. AMD says it's "unused", whatever that means. We're
4695 * ignoring for now. */
4696 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4697 {
4698 /* register target */
4699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4700 IEM_MC_BEGIN(0, 0);
4701 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4702 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4703 } IEM_MC_ELSE() {
4704 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4705 } IEM_MC_ENDIF();
4706 IEM_MC_ADVANCE_RIP();
4707 IEM_MC_END();
4708 }
4709 else
4710 {
4711 /* memory target */
4712 IEM_MC_BEGIN(0, 1);
4713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4716 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4718 } IEM_MC_ELSE() {
4719 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4720 } IEM_MC_ENDIF();
4721 IEM_MC_ADVANCE_RIP();
4722 IEM_MC_END();
4723 }
4724 return VINF_SUCCESS;
4725}
4726
4727
4728/** Opcode 0x0f 0x9f. */
4729FNIEMOP_DEF(iemOp_setnle_Eb)
4730{
4731 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4732 IEMOP_HLP_MIN_386();
4733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4734
4735 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4736 * any way. AMD says it's "unused", whatever that means. We're
4737 * ignoring for now. */
4738 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4739 {
4740 /* register target */
4741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4742 IEM_MC_BEGIN(0, 0);
4743 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4744 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4745 } IEM_MC_ELSE() {
4746 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4747 } IEM_MC_ENDIF();
4748 IEM_MC_ADVANCE_RIP();
4749 IEM_MC_END();
4750 }
4751 else
4752 {
4753 /* memory target */
4754 IEM_MC_BEGIN(0, 1);
4755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4758 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4759 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4760 } IEM_MC_ELSE() {
4761 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4762 } IEM_MC_ENDIF();
4763 IEM_MC_ADVANCE_RIP();
4764 IEM_MC_END();
4765 }
4766 return VINF_SUCCESS;
4767}
4768
4769
4770/**
4771 * Common 'push segment-register' helper.
4772 */
4773FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4774{
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4776 if (iReg < X86_SREG_FS)
4777 IEMOP_HLP_NO_64BIT();
4778 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4779
4780 switch (pVCpu->iem.s.enmEffOpSize)
4781 {
4782 case IEMMODE_16BIT:
4783 IEM_MC_BEGIN(0, 1);
4784 IEM_MC_LOCAL(uint16_t, u16Value);
4785 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4786 IEM_MC_PUSH_U16(u16Value);
4787 IEM_MC_ADVANCE_RIP();
4788 IEM_MC_END();
4789 break;
4790
4791 case IEMMODE_32BIT:
4792 IEM_MC_BEGIN(0, 1);
4793 IEM_MC_LOCAL(uint32_t, u32Value);
4794 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4795 IEM_MC_PUSH_U32_SREG(u32Value);
4796 IEM_MC_ADVANCE_RIP();
4797 IEM_MC_END();
4798 break;
4799
4800 case IEMMODE_64BIT:
4801 IEM_MC_BEGIN(0, 1);
4802 IEM_MC_LOCAL(uint64_t, u64Value);
4803 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4804 IEM_MC_PUSH_U64(u64Value);
4805 IEM_MC_ADVANCE_RIP();
4806 IEM_MC_END();
4807 break;
4808 }
4809
4810 return VINF_SUCCESS;
4811}
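
/* The 16-bit and 64-bit paths push the zero-extended selector directly, but
 * the 32-bit path goes through the dedicated IEM_MC_PUSH_U32_SREG: on at
 * least some real CPUs a 32-bit push of a segment register only writes the
 * low word of the stack slot, and that MC exists to let the implementation
 * mimic such behaviour.
 */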
4812
4813
4814/** Opcode 0x0f 0xa0. */
4815FNIEMOP_DEF(iemOp_push_fs)
4816{
4817 IEMOP_MNEMONIC(push_fs, "push fs");
4818 IEMOP_HLP_MIN_386();
4819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4820 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4821}
4822
4823
4824/** Opcode 0x0f 0xa1. */
4825FNIEMOP_DEF(iemOp_pop_fs)
4826{
4827 IEMOP_MNEMONIC(pop_fs, "pop fs");
4828 IEMOP_HLP_MIN_386();
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4830 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4831}
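
/* Both pop fs (0x0f 0xa1) and pop gs (0x0f 0xa9) defer to iemCImpl_pop_Sreg
 * rather than use an MC block: loading a segment register involves descriptor
 * fetching and fault checking that is better expressed in plain C.
 */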
4832
4833
4834/** Opcode 0x0f 0xa2. */
4835FNIEMOP_DEF(iemOp_cpuid)
4836{
4837 IEMOP_MNEMONIC(cpuid, "cpuid");
4838    IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486s. */
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4841}
4842
4843
4844/**
4845 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4846 * iemOp_bts_Ev_Gv.
4847 */
4848FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4849{
4850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4851 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4852
4853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4854 {
4855 /* register destination. */
4856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4857 switch (pVCpu->iem.s.enmEffOpSize)
4858 {
4859 case IEMMODE_16BIT:
4860 IEM_MC_BEGIN(3, 0);
4861 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4862 IEM_MC_ARG(uint16_t, u16Src, 1);
4863 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4864
4865 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4866 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4867 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4868 IEM_MC_REF_EFLAGS(pEFlags);
4869 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4870
4871 IEM_MC_ADVANCE_RIP();
4872 IEM_MC_END();
4873 return VINF_SUCCESS;
4874
4875 case IEMMODE_32BIT:
4876 IEM_MC_BEGIN(3, 0);
4877 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4878 IEM_MC_ARG(uint32_t, u32Src, 1);
4879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4880
4881 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4882 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4883 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4884 IEM_MC_REF_EFLAGS(pEFlags);
4885 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4886
4887 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4888 IEM_MC_ADVANCE_RIP();
4889 IEM_MC_END();
4890 return VINF_SUCCESS;
4891
4892 case IEMMODE_64BIT:
4893 IEM_MC_BEGIN(3, 0);
4894 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4895 IEM_MC_ARG(uint64_t, u64Src, 1);
4896 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4897
4898 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4899 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4900 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4901 IEM_MC_REF_EFLAGS(pEFlags);
4902 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4903
4904 IEM_MC_ADVANCE_RIP();
4905 IEM_MC_END();
4906 return VINF_SUCCESS;
4907
4908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4909 }
4910 }
4911 else
4912 {
4913 /* memory destination. */
4914
4915 uint32_t fAccess;
4916 if (pImpl->pfnLockedU16)
4917 fAccess = IEM_ACCESS_DATA_RW;
4918 else /* BT */
4919 fAccess = IEM_ACCESS_DATA_R;
4920
4921 /** @todo test negative bit offsets! */
4922 switch (pVCpu->iem.s.enmEffOpSize)
4923 {
4924 case IEMMODE_16BIT:
4925 IEM_MC_BEGIN(3, 2);
4926 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4927 IEM_MC_ARG(uint16_t, u16Src, 1);
4928 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4930 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4931
4932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4933 if (pImpl->pfnLockedU16)
4934 IEMOP_HLP_DONE_DECODING();
4935 else
4936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4937 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4938 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4939 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4940 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4941 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4942 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4943 IEM_MC_FETCH_EFLAGS(EFlags);
4944
4945 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4946 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4947 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4948 else
4949 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4950 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4951
4952 IEM_MC_COMMIT_EFLAGS(EFlags);
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 return VINF_SUCCESS;
4956
4957 case IEMMODE_32BIT:
4958 IEM_MC_BEGIN(3, 2);
4959 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4960 IEM_MC_ARG(uint32_t, u32Src, 1);
4961 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4963 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4964
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4966 if (pImpl->pfnLockedU16)
4967 IEMOP_HLP_DONE_DECODING();
4968 else
4969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4970 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4971 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4972 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4973 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4974 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4975 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4976 IEM_MC_FETCH_EFLAGS(EFlags);
4977
4978 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4979 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4981 else
4982 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4984
4985 IEM_MC_COMMIT_EFLAGS(EFlags);
4986 IEM_MC_ADVANCE_RIP();
4987 IEM_MC_END();
4988 return VINF_SUCCESS;
4989
4990 case IEMMODE_64BIT:
4991 IEM_MC_BEGIN(3, 2);
4992 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4993 IEM_MC_ARG(uint64_t, u64Src, 1);
4994 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4996 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4997
4998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4999 if (pImpl->pfnLockedU16)
5000 IEMOP_HLP_DONE_DECODING();
5001 else
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5004 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5005 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5006 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5007 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5008 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5009 IEM_MC_FETCH_EFLAGS(EFlags);
5010
5011 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5012 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5013 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5014 else
5015 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5016 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5017
5018 IEM_MC_COMMIT_EFLAGS(EFlags);
5019 IEM_MC_ADVANCE_RIP();
5020 IEM_MC_END();
5021 return VINF_SUCCESS;
5022
5023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5024 }
5025 }
5026}
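
/* Worked example of the address math in the memory path above, for a 16-bit
 * bt with a register bit offset of 35: i16AddrAdj = 35 >> 4 = 2 words, << 1 =
 * 4 bytes, so the access hits GCPtrEffDst + 4 and the in-word bit index is
 * 35 & 0x0f = 3.  The arithmetic shift keeps negative offsets addressing
 * below the base (see the todo above).  Note that bt itself takes the
 * read-only fAccess branch since g_iemAImpl_bt has no locked variant.
 */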
5027
5028
5029/** Opcode 0x0f 0xa3. */
5030FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5031{
5032 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5033 IEMOP_HLP_MIN_386();
5034 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5035}
5036
5037
5038/**
5039 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5040 */
5041FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5042{
5043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5044 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5045
5046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5047 {
5048 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5050
5051 switch (pVCpu->iem.s.enmEffOpSize)
5052 {
5053 case IEMMODE_16BIT:
5054 IEM_MC_BEGIN(4, 0);
5055 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5056 IEM_MC_ARG(uint16_t, u16Src, 1);
5057 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5058 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5059
5060 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5061 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5062 IEM_MC_REF_EFLAGS(pEFlags);
5063 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5064
5065 IEM_MC_ADVANCE_RIP();
5066 IEM_MC_END();
5067 return VINF_SUCCESS;
5068
5069 case IEMMODE_32BIT:
5070 IEM_MC_BEGIN(4, 0);
5071 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5072 IEM_MC_ARG(uint32_t, u32Src, 1);
5073 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5074 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5075
5076 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5077 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5078 IEM_MC_REF_EFLAGS(pEFlags);
5079 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5080
5081 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5082 IEM_MC_ADVANCE_RIP();
5083 IEM_MC_END();
5084 return VINF_SUCCESS;
5085
5086 case IEMMODE_64BIT:
5087 IEM_MC_BEGIN(4, 0);
5088 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5089 IEM_MC_ARG(uint64_t, u64Src, 1);
5090 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5091 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5092
5093 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5094 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5095 IEM_MC_REF_EFLAGS(pEFlags);
5096 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5097
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 return VINF_SUCCESS;
5101
5102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5103 }
5104 }
5105 else
5106 {
5107 switch (pVCpu->iem.s.enmEffOpSize)
5108 {
5109 case IEMMODE_16BIT:
5110 IEM_MC_BEGIN(4, 2);
5111 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5112 IEM_MC_ARG(uint16_t, u16Src, 1);
5113 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5114 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5116
5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5118 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5119 IEM_MC_ASSIGN(cShiftArg, cShift);
5120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5121 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5122 IEM_MC_FETCH_EFLAGS(EFlags);
5123 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5124 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5125
5126 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5127 IEM_MC_COMMIT_EFLAGS(EFlags);
5128 IEM_MC_ADVANCE_RIP();
5129 IEM_MC_END();
5130 return VINF_SUCCESS;
5131
5132 case IEMMODE_32BIT:
5133 IEM_MC_BEGIN(4, 2);
5134 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5135 IEM_MC_ARG(uint32_t, u32Src, 1);
5136 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5137 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5139
5140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5141 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5142 IEM_MC_ASSIGN(cShiftArg, cShift);
5143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5144 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5145 IEM_MC_FETCH_EFLAGS(EFlags);
5146 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5147 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5148
5149 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5150 IEM_MC_COMMIT_EFLAGS(EFlags);
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 return VINF_SUCCESS;
5154
5155 case IEMMODE_64BIT:
5156 IEM_MC_BEGIN(4, 2);
5157 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5158 IEM_MC_ARG(uint64_t, u64Src, 1);
5159 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5160 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5162
5163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5164 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5165 IEM_MC_ASSIGN(cShiftArg, cShift);
5166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5167 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5168 IEM_MC_FETCH_EFLAGS(EFlags);
5169 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5170 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5171
5172 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5173 IEM_MC_COMMIT_EFLAGS(EFlags);
5174 IEM_MC_ADVANCE_RIP();
5175 IEM_MC_END();
5176 return VINF_SUCCESS;
5177
5178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5179 }
5180 }
5181}
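
/* Note the 1 passed to IEM_MC_CALC_RM_EFF_ADDR in the memory paths above: it
 * tells the effective-address calculation that one immediate byte still
 * follows the displacement, which matters for RIP-relative operands in
 * 64-bit mode where the address is computed from the end of the instruction.
 */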
5182
5183
5184/**
5185 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5186 */
5187FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5188{
5189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5190 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5191
5192 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5193 {
5194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195
5196 switch (pVCpu->iem.s.enmEffOpSize)
5197 {
5198 case IEMMODE_16BIT:
5199 IEM_MC_BEGIN(4, 0);
5200 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5201 IEM_MC_ARG(uint16_t, u16Src, 1);
5202 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5203 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5204
5205 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5206 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5207 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5208 IEM_MC_REF_EFLAGS(pEFlags);
5209 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5210
5211 IEM_MC_ADVANCE_RIP();
5212 IEM_MC_END();
5213 return VINF_SUCCESS;
5214
5215 case IEMMODE_32BIT:
5216 IEM_MC_BEGIN(4, 0);
5217 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5218 IEM_MC_ARG(uint32_t, u32Src, 1);
5219 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5220 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5221
5222 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5223 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5224 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5225 IEM_MC_REF_EFLAGS(pEFlags);
5226 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5227
5228 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5229 IEM_MC_ADVANCE_RIP();
5230 IEM_MC_END();
5231 return VINF_SUCCESS;
5232
5233 case IEMMODE_64BIT:
5234 IEM_MC_BEGIN(4, 0);
5235 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5236 IEM_MC_ARG(uint64_t, u64Src, 1);
5237 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5238 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5239
5240 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5241 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5242 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5243 IEM_MC_REF_EFLAGS(pEFlags);
5244 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5245
5246 IEM_MC_ADVANCE_RIP();
5247 IEM_MC_END();
5248 return VINF_SUCCESS;
5249
5250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5251 }
5252 }
5253 else
5254 {
5255 switch (pVCpu->iem.s.enmEffOpSize)
5256 {
5257 case IEMMODE_16BIT:
5258 IEM_MC_BEGIN(4, 2);
5259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5260 IEM_MC_ARG(uint16_t, u16Src, 1);
5261 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5264
5265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5267 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5268 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5269 IEM_MC_FETCH_EFLAGS(EFlags);
5270 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5271 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5272
5273 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5274 IEM_MC_COMMIT_EFLAGS(EFlags);
5275 IEM_MC_ADVANCE_RIP();
5276 IEM_MC_END();
5277 return VINF_SUCCESS;
5278
5279 case IEMMODE_32BIT:
5280 IEM_MC_BEGIN(4, 2);
5281 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5282 IEM_MC_ARG(uint32_t, u32Src, 1);
5283 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5284 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5286
5287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5289 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5290 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5291 IEM_MC_FETCH_EFLAGS(EFlags);
5292 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5293 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5294
5295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5296 IEM_MC_COMMIT_EFLAGS(EFlags);
5297 IEM_MC_ADVANCE_RIP();
5298 IEM_MC_END();
5299 return VINF_SUCCESS;
5300
5301 case IEMMODE_64BIT:
5302 IEM_MC_BEGIN(4, 2);
5303 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5304 IEM_MC_ARG(uint64_t, u64Src, 1);
5305 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5308
5309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5311 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5312 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5313 IEM_MC_FETCH_EFLAGS(EFlags);
5314 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5315 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5316
5317 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5318 IEM_MC_COMMIT_EFLAGS(EFlags);
5319 IEM_MC_ADVANCE_RIP();
5320 IEM_MC_END();
5321 return VINF_SUCCESS;
5322
5323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5324 }
5325 }
5326}
5327
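/*
 * Illustrative sketch, not part of the decoder: the shld/shrd workers above
 * dispatch to assembly helpers through pImpl.  Assuming the CL count is masked
 * modulo 32 (modulo 64 for 64-bit operands) as documented for 386+ CPUs, the
 * 32-bit left variant behaves roughly like the hypothetical function below;
 * for 16-bit operands, counts above 16 yield undefined results.
 */
#if 0 /* disabled example, iemExampleShldU32 is a made-up name */
#include <stdint.h>

static uint32_t iemExampleShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                   /* the CPU masks the CL count mod 32 */
    if (!cShift)
        return uDst;                /* a zero count leaves the destination alone */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif
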
5328
5329
5330/** Opcode 0x0f 0xa4. */
5331FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5332{
5333 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5334 IEMOP_HLP_MIN_386();
5335 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5336}
5337
5338
5339/** Opcode 0x0f 0xa5. */
5340FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5341{
5342 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5343 IEMOP_HLP_MIN_386();
5344 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5345}
5346
5347
5348/** Opcode 0x0f 0xa8. */
5349FNIEMOP_DEF(iemOp_push_gs)
5350{
5351 IEMOP_MNEMONIC(push_gs, "push gs");
5352 IEMOP_HLP_MIN_386();
5353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5354 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5355}
5356
5357
5358/** Opcode 0x0f 0xa9. */
5359FNIEMOP_DEF(iemOp_pop_gs)
5360{
5361 IEMOP_MNEMONIC(pop_gs, "pop gs");
5362 IEMOP_HLP_MIN_386();
5363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5364 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5365}
5366
5367
5368/** Opcode 0x0f 0xaa. */
5369FNIEMOP_STUB(iemOp_rsm);
5370//IEMOP_HLP_MIN_386();
5371
5372
5373/** Opcode 0x0f 0xab. */
5374FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5375{
5376 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5377 IEMOP_HLP_MIN_386();
5378 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5379}
5380
5381
5382/** Opcode 0x0f 0xac. */
5383FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5384{
5385 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5386 IEMOP_HLP_MIN_386();
5387 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5388}
5389
5390
5391/** Opcode 0x0f 0xad. */
5392FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5393{
5394 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5395 IEMOP_HLP_MIN_386();
5396 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5397}
5398
5399
5400/** Opcode 0x0f 0xae mem/0. */
5401FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5402{
5403 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5404 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5405 return IEMOP_RAISE_INVALID_OPCODE();
5406
5407 IEM_MC_BEGIN(3, 1);
5408 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5409 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5410 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5414 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5415 IEM_MC_END();
5416 return VINF_SUCCESS;
5417}
5418
5419
5420/** Opcode 0x0f 0xae mem/1. */
5421FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5422{
5423 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5424 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5425 return IEMOP_RAISE_INVALID_OPCODE();
5426
5427 IEM_MC_BEGIN(3, 1);
5428 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5429 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5430 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5433 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5434 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5435 IEM_MC_END();
5436 return VINF_SUCCESS;
5437}
5438
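/*
 * Illustrative sketch, not part of IEM: fxsave/fxrstor above raise #UD unless
 * the guest CPU profile has the FXSR feature, which on real hardware is
 * reported in CPUID.01H:EDX bit 24.  A hypothetical stand-alone probe:
 */
#if 0 /* disabled example, iemExampleHostHasFxsr is a made-up name */
#include <stdbool.h>
#include <cpuid.h>                      /* GCC/Clang __get_cpuid() */

static bool iemExampleHostHasFxsr(void)
{
    unsigned uEax, uEbx, uEcx, uEdx;
    if (!__get_cpuid(1, &uEax, &uEbx, &uEcx, &uEdx))
        return false;
    return (uEdx & (1u << 24)) != 0;    /* CPUID.01H:EDX.FXSR[bit 24] */
}
#endif
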
5439
5440/** Opcode 0x0f 0xae mem/2. */
5441FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5442
5443/** Opcode 0x0f 0xae mem/3. */
5444FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5445
5446/** Opcode 0x0f 0xae mem/4. */
5447FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5448
5449/** Opcode 0x0f 0xae mem/5. */
5450FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5451
5452/** Opcode 0x0f 0xae mem/6. */
5453FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5454
5455/** Opcode 0x0f 0xae mem/7. */
5456FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5457
5458
5459/** Opcode 0x0f 0xae 11b/5. */
5460FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5461{
5462 RT_NOREF_PV(bRm);
5463 IEMOP_MNEMONIC(lfence, "lfence");
5464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5465 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5466 return IEMOP_RAISE_INVALID_OPCODE();
5467
5468 IEM_MC_BEGIN(0, 0);
5469 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5470 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5471 else
5472 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 return VINF_SUCCESS;
5476}
5477
5478
5479/** Opcode 0x0f 0xae 11b/6. */
5480FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5481{
5482 RT_NOREF_PV(bRm);
5483 IEMOP_MNEMONIC(mfence, "mfence");
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5486 return IEMOP_RAISE_INVALID_OPCODE();
5487
5488 IEM_MC_BEGIN(0, 0);
5489 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5490 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5491 else
5492 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 return VINF_SUCCESS;
5496}
5497
5498
5499/** Opcode 0x0f 0xae 11b/7. */
5500FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5501{
5502 RT_NOREF_PV(bRm);
5503 IEMOP_MNEMONIC(sfence, "sfence");
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5506 return IEMOP_RAISE_INVALID_OPCODE();
5507
5508 IEM_MC_BEGIN(0, 0);
5509 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5510 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5511 else
5512 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5513 IEM_MC_ADVANCE_RIP();
5514 IEM_MC_END();
5515 return VINF_SUCCESS;
5516}
5517
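/*
 * Illustrative sketch, not part of IEM: when the host lacks SSE2 the three
 * fence workers above call iemAImpl_alt_mem_fence instead.  A LOCKed
 * read-modify-write serializes memory on any x86, so one plausible shape of
 * such a fallback (GCC-style inline assembly, made-up name) is:
 */
#if 0 /* disabled example */
static void iemExampleAltMemFence(void)
{
    int volatile iTmp = 0;
    /* Any LOCKed RMW acts as a full memory barrier, SSE2 or not. */
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (iTmp) : : "memory", "cc");
}
#endif
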
5518
5519/** Opcode 0xf3 0x0f 0xae 11b/0. */
5520FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5521
5522/** Opcode 0xf3 0x0f 0xae 11b/1. */
5523FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5524
5525/** Opcode 0xf3 0x0f 0xae 11b/2. */
5526FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5527
5528/** Opcode 0xf3 0x0f 0xae 11b/3. */
5529FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5530
5531
5532/** Opcode 0x0f 0xae. */
5533FNIEMOP_DEF(iemOp_Grp15)
5534{
5535 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5537 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5538 {
5539 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5540 {
5541 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5542 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5543 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5544 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5545 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5546 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5547 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5548 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5550 }
5551 }
5552 else
5553 {
5554 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5555 {
5556 case 0:
5557 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5558 {
5559 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5560 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5561 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5562 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5563 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5564 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5565 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5566 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5568 }
5569 break;
5570
5571 case IEM_OP_PRF_REPZ:
5572 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5573 {
5574 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5575 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5576 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5577 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5578 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5579 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5580 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5581 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5583 }
5584 break;
5585
5586 default:
5587 return IEMOP_RAISE_INVALID_OPCODE();
5588 }
5589 }
5590}
5591
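/*
 * Illustrative sketch, not part of the decoder: the group 15 dispatcher above
 * routes on the three ModR/M fields.  The X86_MODRM_* mask-and-shift dance
 * amounts to the following (made-up helper name):
 */
#if 0 /* disabled example */
#include <stdint.h>

static void iemExampleSplitModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
{
    *pMod = bRm >> 6;               /* 11b selects the register form, else memory */
    *pReg = (bRm >> 3) & 7;         /* the /0../7 opcode extension for group opcodes */
    *pRm  = bRm & 7;                /* register operand or effective address base */
}
#endif
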
5592
5593/** Opcode 0x0f 0xaf. */
5594FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5595{
5596 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5597 IEMOP_HLP_MIN_386();
5598 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5599 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5600}
5601
5602
5603/** Opcode 0x0f 0xb0. */
5604FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5605{
5606 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5607 IEMOP_HLP_MIN_486();
5608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5609
5610 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5611 {
5612 IEMOP_HLP_DONE_DECODING();
5613 IEM_MC_BEGIN(4, 0);
5614 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5615 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5616 IEM_MC_ARG(uint8_t, u8Src, 2);
5617 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5618
5619 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5620 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5621 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5622 IEM_MC_REF_EFLAGS(pEFlags);
5623 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5624 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5625 else
5626 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5627
5628 IEM_MC_ADVANCE_RIP();
5629 IEM_MC_END();
5630 }
5631 else
5632 {
5633 IEM_MC_BEGIN(4, 3);
5634 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5635 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5636 IEM_MC_ARG(uint8_t, u8Src, 2);
5637 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5639 IEM_MC_LOCAL(uint8_t, u8Al);
5640
5641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5642 IEMOP_HLP_DONE_DECODING();
5643 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5644 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5645 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5646 IEM_MC_FETCH_EFLAGS(EFlags);
5647 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5648 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5649 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5650 else
5651 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5652
5653 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5654 IEM_MC_COMMIT_EFLAGS(EFlags);
5655 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5656 IEM_MC_ADVANCE_RIP();
5657 IEM_MC_END();
5658 }
5659 return VINF_SUCCESS;
5660}
5661
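/*
 * Illustrative sketch, not part of IEM: the cmpxchg workers invoked above
 * implement the usual compare-and-exchange.  An 8-bit C rendition with
 * made-up names and EFLAGS reduced to the ZF outcome:
 */
#if 0 /* disabled example */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc)
{
    if (*puDst == *puAl)
    {
        *puDst = uSrc;              /* equal: store the source operand, ZF=1 */
        return true;
    }
    *puAl = *puDst;                 /* not equal: load the destination into AL, ZF=0 */
    return false;
}
#endif
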
5662/** Opcode 0x0f 0xb1. */
5663FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5664{
5665 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5666 IEMOP_HLP_MIN_486();
5667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5668
5669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5670 {
5671 IEMOP_HLP_DONE_DECODING();
5672 switch (pVCpu->iem.s.enmEffOpSize)
5673 {
5674 case IEMMODE_16BIT:
5675 IEM_MC_BEGIN(4, 0);
5676 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5677 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5678 IEM_MC_ARG(uint16_t, u16Src, 2);
5679 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5680
5681 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5682 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5683 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5684 IEM_MC_REF_EFLAGS(pEFlags);
5685 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5686 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5687 else
5688 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5689
5690 IEM_MC_ADVANCE_RIP();
5691 IEM_MC_END();
5692 return VINF_SUCCESS;
5693
5694 case IEMMODE_32BIT:
5695 IEM_MC_BEGIN(4, 0);
5696 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5697 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5698 IEM_MC_ARG(uint32_t, u32Src, 2);
5699 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5700
5701 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5702 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5703 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5704 IEM_MC_REF_EFLAGS(pEFlags);
5705 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5706 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5707 else
5708 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5709
5710 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5711 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5712 IEM_MC_ADVANCE_RIP();
5713 IEM_MC_END();
5714 return VINF_SUCCESS;
5715
5716 case IEMMODE_64BIT:
5717 IEM_MC_BEGIN(4, 0);
5718 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5719 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5720#ifdef RT_ARCH_X86
5721 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5722#else
5723 IEM_MC_ARG(uint64_t, u64Src, 2);
5724#endif
5725 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5726
5727 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5728 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5729 IEM_MC_REF_EFLAGS(pEFlags);
5730#ifdef RT_ARCH_X86
5731 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5732 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5734 else
5735 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5736#else
5737 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5738 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5739 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5740 else
5741 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5742#endif
5743
5744 IEM_MC_ADVANCE_RIP();
5745 IEM_MC_END();
5746 return VINF_SUCCESS;
5747
5748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5749 }
5750 }
5751 else
5752 {
5753 switch (pVCpu->iem.s.enmEffOpSize)
5754 {
5755 case IEMMODE_16BIT:
5756 IEM_MC_BEGIN(4, 3);
5757 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5758 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5759 IEM_MC_ARG(uint16_t, u16Src, 2);
5760 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5762 IEM_MC_LOCAL(uint16_t, u16Ax);
5763
5764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5765 IEMOP_HLP_DONE_DECODING();
5766 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5767 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5768 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5769 IEM_MC_FETCH_EFLAGS(EFlags);
5770 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5771 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5772 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5773 else
5774 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5775
5776 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5777 IEM_MC_COMMIT_EFLAGS(EFlags);
5778 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5779 IEM_MC_ADVANCE_RIP();
5780 IEM_MC_END();
5781 return VINF_SUCCESS;
5782
5783 case IEMMODE_32BIT:
5784 IEM_MC_BEGIN(4, 3);
5785 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5786 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5787 IEM_MC_ARG(uint32_t, u32Src, 2);
5788 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5790 IEM_MC_LOCAL(uint32_t, u32Eax);
5791
5792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5793 IEMOP_HLP_DONE_DECODING();
5794 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5795 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5796 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5797 IEM_MC_FETCH_EFLAGS(EFlags);
5798 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5799 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5800 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5801 else
5802 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5803
5804 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5805 IEM_MC_COMMIT_EFLAGS(EFlags);
5806 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 return VINF_SUCCESS;
5810
5811 case IEMMODE_64BIT:
5812 IEM_MC_BEGIN(4, 3);
5813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5814 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5815#ifdef RT_ARCH_X86
5816 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5817#else
5818 IEM_MC_ARG(uint64_t, u64Src, 2);
5819#endif
5820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5822 IEM_MC_LOCAL(uint64_t, u64Rax);
5823
5824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5825 IEMOP_HLP_DONE_DECODING();
5826 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5827 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5828 IEM_MC_FETCH_EFLAGS(EFlags);
5829 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5830#ifdef RT_ARCH_X86
5831 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5834 else
5835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5836#else
5837 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5838 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5839 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5840 else
5841 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5842#endif
5843
5844 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5845 IEM_MC_COMMIT_EFLAGS(EFlags);
5846 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5847 IEM_MC_ADVANCE_RIP();
5848 IEM_MC_END();
5849 return VINF_SUCCESS;
5850
5851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5852 }
5853 }
5854}
5855
5856
5857FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5858{
5859 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5860 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5861
5862 switch (pVCpu->iem.s.enmEffOpSize)
5863 {
5864 case IEMMODE_16BIT:
5865 IEM_MC_BEGIN(5, 1);
5866 IEM_MC_ARG(uint16_t, uSel, 0);
5867 IEM_MC_ARG(uint16_t, offSeg, 1);
5868 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5869 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5870 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5871 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5874 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5875 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5876 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5877 IEM_MC_END();
5878 return VINF_SUCCESS;
5879
5880 case IEMMODE_32BIT:
5881 IEM_MC_BEGIN(5, 1);
5882 IEM_MC_ARG(uint16_t, uSel, 0);
5883 IEM_MC_ARG(uint32_t, offSeg, 1);
5884 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5885 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5886 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5887 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5890 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5891 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5892 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5893 IEM_MC_END();
5894 return VINF_SUCCESS;
5895
5896 case IEMMODE_64BIT:
5897 IEM_MC_BEGIN(5, 1);
5898 IEM_MC_ARG(uint16_t, uSel, 0);
5899 IEM_MC_ARG(uint64_t, offSeg, 1);
5900 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5901 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5902 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5903 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5906 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5907 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5908 else
5909 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5910 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5911 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5912 IEM_MC_END();
5913 return VINF_SUCCESS;
5914
5915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5916 }
5917}
5918
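/*
 * Illustrative sketch, not part of IEM: the far pointer operand of lss/lfs/lgs
 * keeps the offset first and the 16-bit selector right after it, which is why
 * the worker above fetches the selector at displacement 2/4/8.  A hypothetical
 * reader for the 32-bit operand size case:
 */
#if 0 /* disabled example */
#include <stdint.h>
#include <string.h>

static void iemExampleReadFarPtr32(const uint8_t *pbMem, uint16_t *puSel, uint32_t *puOff)
{
    memcpy(puOff, pbMem, sizeof(*puOff));       /* 32-bit offset at +0 */
    memcpy(puSel, pbMem + 4, sizeof(*puSel));   /* 16-bit selector at +4 */
}
#endif
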
5919
5920/** Opcode 0x0f 0xb2. */
5921FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5922{
5923 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5924 IEMOP_HLP_MIN_386();
5925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5927 return IEMOP_RAISE_INVALID_OPCODE();
5928 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5929}
5930
5931
5932/** Opcode 0x0f 0xb3. */
5933FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5934{
5935 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5936 IEMOP_HLP_MIN_386();
5937 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5938}
5939
5940
5941/** Opcode 0x0f 0xb4. */
5942FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5943{
5944 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5945 IEMOP_HLP_MIN_386();
5946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5948 return IEMOP_RAISE_INVALID_OPCODE();
5949 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5950}
5951
5952
5953/** Opcode 0x0f 0xb5. */
5954FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5955{
5956 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5957 IEMOP_HLP_MIN_386();
5958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5960 return IEMOP_RAISE_INVALID_OPCODE();
5961 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5962}
5963
5964
5965/** Opcode 0x0f 0xb6. */
5966FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5967{
5968 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5969 IEMOP_HLP_MIN_386();
5970
5971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5972
5973 /*
5974 * If rm is denoting a register, no more instruction bytes.
5975 */
5976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5977 {
5978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5979 switch (pVCpu->iem.s.enmEffOpSize)
5980 {
5981 case IEMMODE_16BIT:
5982 IEM_MC_BEGIN(0, 1);
5983 IEM_MC_LOCAL(uint16_t, u16Value);
5984 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5985 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5986 IEM_MC_ADVANCE_RIP();
5987 IEM_MC_END();
5988 return VINF_SUCCESS;
5989
5990 case IEMMODE_32BIT:
5991 IEM_MC_BEGIN(0, 1);
5992 IEM_MC_LOCAL(uint32_t, u32Value);
5993 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5994 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 return VINF_SUCCESS;
5998
5999 case IEMMODE_64BIT:
6000 IEM_MC_BEGIN(0, 1);
6001 IEM_MC_LOCAL(uint64_t, u64Value);
6002 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6003 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6004 IEM_MC_ADVANCE_RIP();
6005 IEM_MC_END();
6006 return VINF_SUCCESS;
6007
6008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6009 }
6010 }
6011 else
6012 {
6013 /*
6014 * We're loading a register from memory.
6015 */
6016 switch (pVCpu->iem.s.enmEffOpSize)
6017 {
6018 case IEMMODE_16BIT:
6019 IEM_MC_BEGIN(0, 2);
6020 IEM_MC_LOCAL(uint16_t, u16Value);
6021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6024 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6025 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6026 IEM_MC_ADVANCE_RIP();
6027 IEM_MC_END();
6028 return VINF_SUCCESS;
6029
6030 case IEMMODE_32BIT:
6031 IEM_MC_BEGIN(0, 2);
6032 IEM_MC_LOCAL(uint32_t, u32Value);
6033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6036 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6037 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6038 IEM_MC_ADVANCE_RIP();
6039 IEM_MC_END();
6040 return VINF_SUCCESS;
6041
6042 case IEMMODE_64BIT:
6043 IEM_MC_BEGIN(0, 2);
6044 IEM_MC_LOCAL(uint64_t, u64Value);
6045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6049 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6050 IEM_MC_ADVANCE_RIP();
6051 IEM_MC_END();
6052 return VINF_SUCCESS;
6053
6054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6055 }
6056 }
6057}
6058
6059
6060/** Opcode 0x0f 0xb7. */
6061FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6062{
6063 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6064 IEMOP_HLP_MIN_386();
6065
6066 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6067
6068 /** @todo Not entirely sure how the operand size prefix is handled here,
6069 * assuming that it will be ignored. Would be nice to have a few
6070 * tests for this. */
6071 /*
6072 * If rm is denoting a register, no more instruction bytes.
6073 */
6074 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6075 {
6076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6077 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6078 {
6079 IEM_MC_BEGIN(0, 1);
6080 IEM_MC_LOCAL(uint32_t, u32Value);
6081 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6082 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6083 IEM_MC_ADVANCE_RIP();
6084 IEM_MC_END();
6085 }
6086 else
6087 {
6088 IEM_MC_BEGIN(0, 1);
6089 IEM_MC_LOCAL(uint64_t, u64Value);
6090 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6091 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6092 IEM_MC_ADVANCE_RIP();
6093 IEM_MC_END();
6094 }
6095 }
6096 else
6097 {
6098 /*
6099 * We're loading a register from memory.
6100 */
6101 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6102 {
6103 IEM_MC_BEGIN(0, 2);
6104 IEM_MC_LOCAL(uint32_t, u32Value);
6105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6109 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6110 IEM_MC_ADVANCE_RIP();
6111 IEM_MC_END();
6112 }
6113 else
6114 {
6115 IEM_MC_BEGIN(0, 2);
6116 IEM_MC_LOCAL(uint64_t, u64Value);
6117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6120 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6121 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6122 IEM_MC_ADVANCE_RIP();
6123 IEM_MC_END();
6124 }
6125 }
6126 return VINF_SUCCESS;
6127}
6128
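/*
 * Illustrative sketch, not part of IEM: in 64-bit mode a 32-bit register write
 * implicitly zeroes bits 63:32, which is why the non-REX.W path above gets away
 * with a plain 32-bit store.  The two movzx widths in hypothetical C:
 */
#if 0 /* disabled example */
#include <stdint.h>

static uint64_t iemExampleMovzxGvEw(uint16_t uSrc, int fRexW)
{
    if (fRexW)
        return (uint64_t)uSrc;      /* movzx r64, r/m16: zero extend to 64 bits */
    return (uint32_t)uSrc;          /* movzx r32, r/m16: the upper half clears itself */
}
#endif
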
6129
6130/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6131FNIEMOP_UD_STUB(iemOp_jmpe);
6132/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6133FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6134
6135
6136/** Opcode 0x0f 0xb9. */
6137FNIEMOP_DEF(iemOp_Grp10)
6138{
6139 Log(("iemOp_Grp10 -> #UD\n"));
6140 return IEMOP_RAISE_INVALID_OPCODE();
6141}
6142
6143
6144/** Opcode 0x0f 0xba. */
6145FNIEMOP_DEF(iemOp_Grp8)
6146{
6147 IEMOP_HLP_MIN_386();
6148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6149 PCIEMOPBINSIZES pImpl;
6150 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6151 {
6152 case 0: case 1: case 2: case 3:
6153 return IEMOP_RAISE_INVALID_OPCODE();
6154 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6155 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6156 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6157 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6159 }
6160 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6161
6162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6163 {
6164 /* register destination. */
6165 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6167
6168 switch (pVCpu->iem.s.enmEffOpSize)
6169 {
6170 case IEMMODE_16BIT:
6171 IEM_MC_BEGIN(3, 0);
6172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6173 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6175
6176 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6177 IEM_MC_REF_EFLAGS(pEFlags);
6178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6179
6180 IEM_MC_ADVANCE_RIP();
6181 IEM_MC_END();
6182 return VINF_SUCCESS;
6183
6184 case IEMMODE_32BIT:
6185 IEM_MC_BEGIN(3, 0);
6186 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6187 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6188 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6189
6190 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6191 IEM_MC_REF_EFLAGS(pEFlags);
6192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6193
6194 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6195 IEM_MC_ADVANCE_RIP();
6196 IEM_MC_END();
6197 return VINF_SUCCESS;
6198
6199 case IEMMODE_64BIT:
6200 IEM_MC_BEGIN(3, 0);
6201 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6202 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6203 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6204
6205 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6206 IEM_MC_REF_EFLAGS(pEFlags);
6207 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6208
6209 IEM_MC_ADVANCE_RIP();
6210 IEM_MC_END();
6211 return VINF_SUCCESS;
6212
6213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6214 }
6215 }
6216 else
6217 {
6218 /* memory destination. */
6219
6220 uint32_t fAccess;
6221 if (pImpl->pfnLockedU16)
6222 fAccess = IEM_ACCESS_DATA_RW;
6223 else /* BT */
6224 fAccess = IEM_ACCESS_DATA_R;
6225
6226 /** @todo test negative bit offsets! */
6227 switch (pVCpu->iem.s.enmEffOpSize)
6228 {
6229 case IEMMODE_16BIT:
6230 IEM_MC_BEGIN(3, 1);
6231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6232 IEM_MC_ARG(uint16_t, u16Src, 1);
6233 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6235
6236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6237 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6238 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6239 if (pImpl->pfnLockedU16)
6240 IEMOP_HLP_DONE_DECODING();
6241 else
6242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6243 IEM_MC_FETCH_EFLAGS(EFlags);
6244 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6245 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6246 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6247 else
6248 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6249 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6250
6251 IEM_MC_COMMIT_EFLAGS(EFlags);
6252 IEM_MC_ADVANCE_RIP();
6253 IEM_MC_END();
6254 return VINF_SUCCESS;
6255
6256 case IEMMODE_32BIT:
6257 IEM_MC_BEGIN(3, 1);
6258 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6259 IEM_MC_ARG(uint32_t, u32Src, 1);
6260 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6262
6263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6264 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6265 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6266 if (pImpl->pfnLockedU16)
6267 IEMOP_HLP_DONE_DECODING();
6268 else
6269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6270 IEM_MC_FETCH_EFLAGS(EFlags);
6271 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6272 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6273 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6274 else
6275 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6276 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6277
6278 IEM_MC_COMMIT_EFLAGS(EFlags);
6279 IEM_MC_ADVANCE_RIP();
6280 IEM_MC_END();
6281 return VINF_SUCCESS;
6282
6283 case IEMMODE_64BIT:
6284 IEM_MC_BEGIN(3, 1);
6285 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6286 IEM_MC_ARG(uint64_t, u64Src, 1);
6287 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6289
6290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6291 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6292 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6293 if (pImpl->pfnLockedU16)
6294 IEMOP_HLP_DONE_DECODING();
6295 else
6296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6297 IEM_MC_FETCH_EFLAGS(EFlags);
6298 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6299 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6301 else
6302 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6303 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6304
6305 IEM_MC_COMMIT_EFLAGS(EFlags);
6306 IEM_MC_ADVANCE_RIP();
6307 IEM_MC_END();
6308 return VINF_SUCCESS;
6309
6310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6311 }
6312 }
6313
6314}
6315
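/*
 * Illustrative sketch, not part of IEM: for the immediate forms above the bit
 * offset wraps at the operand width (the u8Bit & 0x0f/0x1f/0x3f masking), so
 * no effective address adjustment is needed, unlike the register-offset forms.
 * A hypothetical bt-style helper for 32-bit operands:
 */
#if 0 /* disabled example */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleBtU32(uint32_t uDst, uint8_t u8Bit)
{
    u8Bit &= 0x1f;                  /* the immediate bit offset wraps at 32 */
    return (uDst >> u8Bit) & 1;     /* the tested bit lands in CF */
}
#endif
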
6316
6317/** Opcode 0x0f 0xbb. */
6318FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6319{
6320 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6321 IEMOP_HLP_MIN_386();
6322 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6323}
6324
6325
6326/** Opcode 0x0f 0xbc. */
6327FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6333}
6334
6335
6336/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6337FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6338
6339
6340/** Opcode 0x0f 0xbd. */
6341FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6342{
6343 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6344 IEMOP_HLP_MIN_386();
6345 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6346 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6347}
6348
6349
6350/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6351FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6352
6353
6354/** Opcode 0x0f 0xbe. */
6355FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6356{
6357 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6358 IEMOP_HLP_MIN_386();
6359
6360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6361
6362 /*
6363 * If rm is denoting a register, no more instruction bytes.
6364 */
6365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6366 {
6367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6368 switch (pVCpu->iem.s.enmEffOpSize)
6369 {
6370 case IEMMODE_16BIT:
6371 IEM_MC_BEGIN(0, 1);
6372 IEM_MC_LOCAL(uint16_t, u16Value);
6373 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6374 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6375 IEM_MC_ADVANCE_RIP();
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378
6379 case IEMMODE_32BIT:
6380 IEM_MC_BEGIN(0, 1);
6381 IEM_MC_LOCAL(uint32_t, u32Value);
6382 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6383 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6384 IEM_MC_ADVANCE_RIP();
6385 IEM_MC_END();
6386 return VINF_SUCCESS;
6387
6388 case IEMMODE_64BIT:
6389 IEM_MC_BEGIN(0, 1);
6390 IEM_MC_LOCAL(uint64_t, u64Value);
6391 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6392 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6393 IEM_MC_ADVANCE_RIP();
6394 IEM_MC_END();
6395 return VINF_SUCCESS;
6396
6397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6398 }
6399 }
6400 else
6401 {
6402 /*
6403 * We're loading a register from memory.
6404 */
6405 switch (pVCpu->iem.s.enmEffOpSize)
6406 {
6407 case IEMMODE_16BIT:
6408 IEM_MC_BEGIN(0, 2);
6409 IEM_MC_LOCAL(uint16_t, u16Value);
6410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6414 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 return VINF_SUCCESS;
6418
6419 case IEMMODE_32BIT:
6420 IEM_MC_BEGIN(0, 2);
6421 IEM_MC_LOCAL(uint32_t, u32Value);
6422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6426 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6427 IEM_MC_ADVANCE_RIP();
6428 IEM_MC_END();
6429 return VINF_SUCCESS;
6430
6431 case IEMMODE_64BIT:
6432 IEM_MC_BEGIN(0, 2);
6433 IEM_MC_LOCAL(uint64_t, u64Value);
6434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6437 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6438 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6444 }
6445 }
6446}
6447
6448
6449/** Opcode 0x0f 0xbf. */
6450FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6451{
6452 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6453 IEMOP_HLP_MIN_386();
6454
6455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6456
6457 /** @todo Not entirely sure how the operand size prefix is handled here,
6458 * assuming that it will be ignored. Would be nice to have a few
6459 * tests for this. */
6460 /*
6461 * If rm is denoting a register, no more instruction bytes.
6462 */
6463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6464 {
6465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6466 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6467 {
6468 IEM_MC_BEGIN(0, 1);
6469 IEM_MC_LOCAL(uint32_t, u32Value);
6470 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6471 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6472 IEM_MC_ADVANCE_RIP();
6473 IEM_MC_END();
6474 }
6475 else
6476 {
6477 IEM_MC_BEGIN(0, 1);
6478 IEM_MC_LOCAL(uint64_t, u64Value);
6479 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6480 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 }
6484 }
6485 else
6486 {
6487 /*
6488 * We're loading a register from memory.
6489 */
6490 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6491 {
6492 IEM_MC_BEGIN(0, 2);
6493 IEM_MC_LOCAL(uint32_t, u32Value);
6494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6498 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6499 IEM_MC_ADVANCE_RIP();
6500 IEM_MC_END();
6501 }
6502 else
6503 {
6504 IEM_MC_BEGIN(0, 2);
6505 IEM_MC_LOCAL(uint64_t, u64Value);
6506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6509 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6510 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6511 IEM_MC_ADVANCE_RIP();
6512 IEM_MC_END();
6513 }
6514 }
6515 return VINF_SUCCESS;
6516}
6517
6518
6519/** Opcode 0x0f 0xc0. */
6520FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6521{
6522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6523 IEMOP_HLP_MIN_486();
6524 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6525
6526 /*
6527 * If rm is denoting a register, no more instruction bytes.
6528 */
6529 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6530 {
6531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6532
6533 IEM_MC_BEGIN(3, 0);
6534 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6535 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6536 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6537
6538 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6539 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6540 IEM_MC_REF_EFLAGS(pEFlags);
6541 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6542
6543 IEM_MC_ADVANCE_RIP();
6544 IEM_MC_END();
6545 }
6546 else
6547 {
6548 /*
6549 * We're accessing memory.
6550 */
6551 IEM_MC_BEGIN(3, 3);
6552 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6553 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6554 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6555 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6557
6558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6559 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6560 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6561 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6562 IEM_MC_FETCH_EFLAGS(EFlags);
6563 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6564 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6565 else
6566 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6567
6568 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6569 IEM_MC_COMMIT_EFLAGS(EFlags);
6570 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6571 IEM_MC_ADVANCE_RIP();
6572 IEM_MC_END();
6573 return VINF_SUCCESS;
6574 }
6575 return VINF_SUCCESS;
6576}
6577
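/*
 * Illustrative sketch, not part of IEM: xadd exchanges and adds in one step,
 * which is why the memory path above snapshots the register into u8RegCopy and
 * writes it back once the sum is committed.  In hypothetical C:
 */
#if 0 /* disabled example */
#include <stdint.h>

static void iemExampleXaddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uTmp = *puDst;    /* remember the old destination */
    *puDst = *puDst + *puReg;       /* the destination receives the sum */
    *puReg = uTmp;                  /* the register receives the old destination */
}
#endif
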
6578
6579/** Opcode 0x0f 0xc1. */
6580FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6581{
6582 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6583 IEMOP_HLP_MIN_486();
6584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6585
6586 /*
6587 * If rm is denoting a register, no more instruction bytes.
6588 */
6589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6590 {
6591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6592
6593 switch (pVCpu->iem.s.enmEffOpSize)
6594 {
6595 case IEMMODE_16BIT:
6596 IEM_MC_BEGIN(3, 0);
6597 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6598 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6599 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6600
6601 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6602 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6603 IEM_MC_REF_EFLAGS(pEFlags);
6604 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6605
6606 IEM_MC_ADVANCE_RIP();
6607 IEM_MC_END();
6608 return VINF_SUCCESS;
6609
6610 case IEMMODE_32BIT:
6611 IEM_MC_BEGIN(3, 0);
6612 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6613 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6614 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6615
6616 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6617 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6618 IEM_MC_REF_EFLAGS(pEFlags);
6619 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6620
6621 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6622 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6623 IEM_MC_ADVANCE_RIP();
6624 IEM_MC_END();
6625 return VINF_SUCCESS;
6626
6627 case IEMMODE_64BIT:
6628 IEM_MC_BEGIN(3, 0);
6629 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6630 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6631 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6632
6633 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6634 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6635 IEM_MC_REF_EFLAGS(pEFlags);
6636 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6637
6638 IEM_MC_ADVANCE_RIP();
6639 IEM_MC_END();
6640 return VINF_SUCCESS;
6641
6642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6643 }
6644 }
6645 else
6646 {
6647 /*
6648 * We're accessing memory.
6649 */
6650 switch (pVCpu->iem.s.enmEffOpSize)
6651 {
6652 case IEMMODE_16BIT:
6653 IEM_MC_BEGIN(3, 3);
6654 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6655 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6656 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6657 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6659
6660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6661 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6662 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6663 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6664 IEM_MC_FETCH_EFLAGS(EFlags);
6665 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6666 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6667 else
6668 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6669
6670 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6671 IEM_MC_COMMIT_EFLAGS(EFlags);
6672 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6673 IEM_MC_ADVANCE_RIP();
6674 IEM_MC_END();
6675 return VINF_SUCCESS;
6676
6677 case IEMMODE_32BIT:
6678 IEM_MC_BEGIN(3, 3);
6679 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6680 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6681 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6682 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6684
6685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6686 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6687 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6688 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6689 IEM_MC_FETCH_EFLAGS(EFlags);
6690 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6691 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6692 else
6693 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6694
6695 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6696 IEM_MC_COMMIT_EFLAGS(EFlags);
6697 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6698 IEM_MC_ADVANCE_RIP();
6699 IEM_MC_END();
6700 return VINF_SUCCESS;
6701
6702 case IEMMODE_64BIT:
6703 IEM_MC_BEGIN(3, 3);
6704 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6705 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6706 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6707 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6709
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6711 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6712 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6713 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6714 IEM_MC_FETCH_EFLAGS(EFlags);
6715 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6716 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6717 else
6718 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6719
6720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6721 IEM_MC_COMMIT_EFLAGS(EFlags);
6722 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6723 IEM_MC_ADVANCE_RIP();
6724 IEM_MC_END();
6725 return VINF_SUCCESS;
6726
6727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6728 }
6729 }
6730}
6731
6732
6733/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
6734FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
6735/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
6736FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
6737/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
6738FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
6739/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
6740FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
6741
6742
6743/** Opcode 0x0f 0xc3. */
6744FNIEMOP_DEF(iemOp_movnti_My_Gy)
6745{
6746 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6747
6748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6749
6750 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6751 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6752 {
6753 switch (pVCpu->iem.s.enmEffOpSize)
6754 {
6755 case IEMMODE_32BIT:
6756 IEM_MC_BEGIN(0, 2);
6757 IEM_MC_LOCAL(uint32_t, u32Value);
6758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6759
6760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6762 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6763 return IEMOP_RAISE_INVALID_OPCODE();
6764
6765 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6766 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6767 IEM_MC_ADVANCE_RIP();
6768 IEM_MC_END();
6769 break;
6770
6771 case IEMMODE_64BIT:
6772 IEM_MC_BEGIN(0, 2);
6773 IEM_MC_LOCAL(uint64_t, u64Value);
6774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6775
6776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6779 return IEMOP_RAISE_INVALID_OPCODE();
6780
6781 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6782 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6783 IEM_MC_ADVANCE_RIP();
6784 IEM_MC_END();
6785 break;
6786
6787 case IEMMODE_16BIT:
6788 /** @todo check this form. */
6789 return IEMOP_RAISE_INVALID_OPCODE();
6790 }
6791 }
6792 else
6793 return IEMOP_RAISE_INVALID_OPCODE();
6794 return VINF_SUCCESS;
6795}
6796/* Opcode 0x66 0x0f 0xc3 - invalid */
6797/* Opcode 0xf3 0x0f 0xc3 - invalid */
6798/* Opcode 0xf2 0x0f 0xc3 - invalid */
6799
6800/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
6801FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
6802/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6803FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
6804/* Opcode 0xf3 0x0f 0xc4 - invalid */
6805/* Opcode 0xf2 0x0f 0xc4 - invalid */
6806
6807/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
6808FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
6809/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
6810FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
6811/* Opcode 0xf3 0x0f 0xc5 - invalid */
6812/* Opcode 0xf2 0x0f 0xc5 - invalid */
6813
6814/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
6815FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
6816/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6817FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
6818/* Opcode 0xf3 0x0f 0xc6 - invalid */
6819/* Opcode 0xf2 0x0f 0xc6 - invalid */
6820
6821
6822/** Opcode 0x0f 0xc7 !11/1. */
6823FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6824{
6825 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6826
6827 IEM_MC_BEGIN(4, 3);
6828 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6829 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6830 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6831 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6832 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6833 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6835
6836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6837 IEMOP_HLP_DONE_DECODING();
6838 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6839
6840 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6841 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6842 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6843
6844 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6845 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6846 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6847
6848 IEM_MC_FETCH_EFLAGS(EFlags);
6849 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6850 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6851 else
6852 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6853
6854 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6855 IEM_MC_COMMIT_EFLAGS(EFlags);
6856 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6857 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6858 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6859 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6860 IEM_MC_ENDIF();
6861 IEM_MC_ADVANCE_RIP();
6862
6863 IEM_MC_END();
6864 return VINF_SUCCESS;
6865}
6866
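/*
 * Illustrative sketch, not part of IEM: cmpxchg8b compares EDX:EAX against the
 * 64-bit memory operand, stores ECX:EBX on a match (ZF=1) and otherwise loads
 * the operand into EDX:EAX (ZF=0), mirroring the register pairing above.
 * In hypothetical C:
 */
#if 0 /* disabled example */
#include <stdbool.h>
#include <stdint.h>

static bool iemExampleCmpXchg8b(uint64_t *puMem, uint64_t *puEdxEax, uint64_t uEcxEbx)
{
    if (*puMem == *puEdxEax)
    {
        *puMem = uEcxEbx;           /* match: store ECX:EBX, ZF=1 */
        return true;
    }
    *puEdxEax = *puMem;             /* mismatch: load memory into EDX:EAX, ZF=0 */
    return false;
}
#endif
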
6867
6868/** Opcode REX.W 0x0f 0xc7 !11/1. */
6869FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6870{
6871 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6872 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6873 {
6874#if 0
6875 RT_NOREF(bRm);
6876 IEMOP_BITCH_ABOUT_STUB();
6877 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6878#else
6879 IEM_MC_BEGIN(4, 3);
6880 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6881 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6882 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6883 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6884 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6885 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6887
6888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6889 IEMOP_HLP_DONE_DECODING();
6890 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6891 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6892
6893 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6894 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6895 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6896
6897 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6898 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6899 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6900
6901 IEM_MC_FETCH_EFLAGS(EFlags);
6902# ifdef RT_ARCH_AMD64
6903 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6904 {
6905 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6906 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6907 else
6908 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6909 }
6910 else
6911# endif
6912 {
6913                /* Note! The fallback for 32-bit systems and systems without CX16 does
6914                   multiple accesses that are not atomic as a whole, which works fine in
6915                   a uni-CPU guest configuration (ignoring DMA). If guest SMP is active
6916                   we have no choice but to use a rendezvous callback here. Sigh. */
6917 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6918 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6919 else
6920 {
6921 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6922 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6923 }
6924 }
6925
6926 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6927 IEM_MC_COMMIT_EFLAGS(EFlags);
6928 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6929 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6930 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6931 IEM_MC_ENDIF();
6932 IEM_MC_ADVANCE_RIP();
6933
6934 IEM_MC_END();
6935 return VINF_SUCCESS;
6936#endif
6937 }
6938 Log(("cmpxchg16b -> #UD\n"));
6939 return IEMOP_RAISE_INVALID_OPCODE();
6940}
6941
6942
6943/** Opcode 0x0f 0xc7 11/6. */
6944FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6945
6946/** Opcode 0x0f 0xc7 !11/6. */
6947FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6948
6949/** Opcode 0x66 0x0f 0xc7 !11/6. */
6950FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6951
6952/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6953FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6954
6955/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6956FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6957
6958
6959/** Opcode 0x0f 0xc7. */
6960FNIEMOP_DEF(iemOp_Grp9)
6961{
6962 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
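    /* Group 9 dispatches on ModRM.reg: /1 is CMPXCHG8B (CMPXCHG16B with REX.W),
       /6 is RDRAND for register operands and VMPTRLD/VMCLEAR/VMXON (selected by
       prefix) for memory operands, and /7 is VMPTRST; everything else is #UD. */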
6963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6964 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6965 {
6966 case 0: case 2: case 3: case 4: case 5:
6967 return IEMOP_RAISE_INVALID_OPCODE();
6968 case 1:
6969 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6970 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6971 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6972 return IEMOP_RAISE_INVALID_OPCODE();
6973 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6974 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6975 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6976 case 6:
6977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6978 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6979 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6980 {
6981 case 0:
6982 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6983 case IEM_OP_PRF_SIZE_OP:
6984 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6985 case IEM_OP_PRF_REPZ:
6986 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6987 default:
6988 return IEMOP_RAISE_INVALID_OPCODE();
6989 }
6990 case 7:
6991 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6992 {
6993 case 0:
6994 case IEM_OP_PRF_REPZ:
6995 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6996 default:
6997 return IEMOP_RAISE_INVALID_OPCODE();
6998 }
6999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7000 }
7001}
7002
7003
7004/**
7005 * Common 'bswap register' helper.
7006 */
7007FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7008{
7009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7010 switch (pVCpu->iem.s.enmEffOpSize)
7011 {
7012 case IEMMODE_16BIT:
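            /* Note: the Intel manuals document BSWAP with a 16-bit operand as
               undefined; the 16-bit worker invoked here decides which behaviour
               we actually provide. */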
7013 IEM_MC_BEGIN(1, 0);
7014 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7015 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7016 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7017 IEM_MC_ADVANCE_RIP();
7018 IEM_MC_END();
7019 return VINF_SUCCESS;
7020
7021 case IEMMODE_32BIT:
7022 IEM_MC_BEGIN(1, 0);
7023 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7024 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7025 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7026 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7027 IEM_MC_ADVANCE_RIP();
7028 IEM_MC_END();
7029 return VINF_SUCCESS;
7030
7031 case IEMMODE_64BIT:
7032 IEM_MC_BEGIN(1, 0);
7033 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7034 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7035 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 return VINF_SUCCESS;
7039
7040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7041 }
7042}
7043
7044
7045/** Opcode 0x0f 0xc8. */
7046FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7047{
7048 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7049    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7050             prefix, but it appears REX.B is actually the correct one.  For a
7051             parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7052 IEMOP_HLP_MIN_486();
7053 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7054}
7055
7056
7057/** Opcode 0x0f 0xc9. */
7058FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7059{
7060 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7061 IEMOP_HLP_MIN_486();
7062 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7063}
7064
7065
7066/** Opcode 0x0f 0xca. */
7067FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7068{
7069    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7070 IEMOP_HLP_MIN_486();
7071 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7072}
7073
7074
7075/** Opcode 0x0f 0xcb. */
7076FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7077{
7078    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7079 IEMOP_HLP_MIN_486();
7080 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7081}
7082
7083
7084/** Opcode 0x0f 0xcc. */
7085FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7086{
7087 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7088 IEMOP_HLP_MIN_486();
7089 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7090}
7091
7092
7093/** Opcode 0x0f 0xcd. */
7094FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7095{
7096 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7097 IEMOP_HLP_MIN_486();
7098 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7099}
7100
7101
7102/** Opcode 0x0f 0xce. */
7103FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7104{
7105 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7106 IEMOP_HLP_MIN_486();
7107 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7108}
7109
7110
7111/** Opcode 0x0f 0xcf. */
7112FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7113{
7114 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7115 IEMOP_HLP_MIN_486();
7116 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7117}
7118
7119
7120/* Opcode 0x0f 0xd0 - invalid */
7121/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7122FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7123/* Opcode 0xf3 0x0f 0xd0 - invalid */
7124/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7125FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7126
7127/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7128FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7129/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7130FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7131/* Opcode 0xf3 0x0f 0xd1 - invalid */
7132/* Opcode 0xf2 0x0f 0xd1 - invalid */
7133
7134/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7135FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7136/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7137FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7138/* Opcode 0xf3 0x0f 0xd2 - invalid */
7139/* Opcode 0xf2 0x0f 0xd2 - invalid */
7140
7141/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7142FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7143/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7144FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7145/* Opcode 0xf3 0x0f 0xd3 - invalid */
7146/* Opcode 0xf2 0x0f 0xd3 - invalid */
7147
7148/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7149FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7150/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7151FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7152/* Opcode 0xf3 0x0f 0xd4 - invalid */
7153/* Opcode 0xf2 0x0f 0xd4 - invalid */
7154
7155/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7156FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7157/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7158FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7159/* Opcode 0xf3 0x0f 0xd5 - invalid */
7160/* Opcode 0xf2 0x0f 0xd5 - invalid */
7161
7162/* Opcode 0x0f 0xd6 - invalid */
7163/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7164FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7165/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7166FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7167/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7168FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7169#if 0
7170FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7171{
7172    /* Docs say register only. */
7173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7174
7175 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7176 {
7177 case IEM_OP_PRF_SIZE_OP: /* SSE */
7178 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7179 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7180 IEM_MC_BEGIN(2, 0);
7181 IEM_MC_ARG(uint64_t *, pDst, 0);
7182 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7183 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7184 IEM_MC_PREPARE_SSE_USAGE();
7185 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7186 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7187 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7188 IEM_MC_ADVANCE_RIP();
7189 IEM_MC_END();
7190 return VINF_SUCCESS;
7191
7192 case 0: /* MMX */
7193            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7194 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7195 IEM_MC_BEGIN(2, 0);
7196 IEM_MC_ARG(uint64_t *, pDst, 0);
7197 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7198 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7199 IEM_MC_PREPARE_FPU_USAGE();
7200 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7201 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7202 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7203 IEM_MC_ADVANCE_RIP();
7204 IEM_MC_END();
7205 return VINF_SUCCESS;
7206
7207 default:
7208 return IEMOP_RAISE_INVALID_OPCODE();
7209 }
7210}
7211#endif
7212
7213
7214/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7215FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7216{
7217    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7218    /** @todo testcase: Check that the instruction implicitly clears the high
7219     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7220     *        and opcode modifications are made to work with the whole width (not
7221     *        just 128). */
7222    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7223    /* Docs say register only. */
7224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7225 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7226 {
7227 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
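        /* PMOVMSKB gathers the most significant bit of each of the eight bytes
           in the MMX source register into the low 8 bits of the destination. */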
7228 IEM_MC_BEGIN(2, 0);
7229 IEM_MC_ARG(uint64_t *, pDst, 0);
7230 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7231 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7232 IEM_MC_PREPARE_FPU_USAGE();
7233 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7234 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7235 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7236 IEM_MC_ADVANCE_RIP();
7237 IEM_MC_END();
7238 return VINF_SUCCESS;
7239 }
7240 return IEMOP_RAISE_INVALID_OPCODE();
7241}
7242
7243/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7244FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7245{
7246    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7247    /** @todo testcase: Check that the instruction implicitly clears the high
7248     *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
7249     *        and opcode modifications are made to work with the whole width (not
7250     *        just 128). */
7251    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7252    /* Docs say register only. */
7253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7255 {
7256 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
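        /* The SSE form gathers the sign bits of all 16 bytes of the XMM source
           into the low 16 bits of the destination GPR. */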
7257 IEM_MC_BEGIN(2, 0);
7258 IEM_MC_ARG(uint64_t *, pDst, 0);
7259 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7260 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7261 IEM_MC_PREPARE_SSE_USAGE();
7262 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7263 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7264 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7265 IEM_MC_ADVANCE_RIP();
7266 IEM_MC_END();
7267 return VINF_SUCCESS;
7268 }
7269 return IEMOP_RAISE_INVALID_OPCODE();
7270}
7271
7272/* Opcode 0xf3 0x0f 0xd7 - invalid */
7273/* Opcode 0xf2 0x0f 0xd7 - invalid */
7274
7275
7276/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7277FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7278/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7279FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7280/* Opcode 0xf3 0x0f 0xd8 - invalid */
7281/* Opcode 0xf2 0x0f 0xd8 - invalid */
7282
7283/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7284FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7285/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7286FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7287/* Opcode 0xf3 0x0f 0xd9 - invalid */
7288/* Opcode 0xf2 0x0f 0xd9 - invalid */
7289
7290/** Opcode 0x0f 0xda - pminub Pq, Qq */
7291FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7292/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7293FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7294/* Opcode 0xf3 0x0f 0xda - invalid */
7295/* Opcode 0xf2 0x0f 0xda - invalid */
7296
7297/** Opcode 0x0f 0xdb - pand Pq, Qq */
7298FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7299/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7300FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7301/* Opcode 0xf3 0x0f 0xdb - invalid */
7302/* Opcode 0xf2 0x0f 0xdb - invalid */
7303
7304/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7305FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7306/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7307FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7308/* Opcode 0xf3 0x0f 0xdc - invalid */
7309/* Opcode 0xf2 0x0f 0xdc - invalid */
7310
7311/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7312FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7313/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7314FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7315/* Opcode 0xf3 0x0f 0xdd - invalid */
7316/* Opcode 0xf2 0x0f 0xdd - invalid */
7317
7318/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7319FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7320/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7321FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7322/* Opcode 0xf3 0x0f 0xde - invalid */
7323/* Opcode 0xf2 0x0f 0xde - invalid */
7324
7325/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7326FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7327/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7328FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7329/* Opcode 0xf3 0x0f 0xdf - invalid */
7330/* Opcode 0xf2 0x0f 0xdf - invalid */
7331
7332/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7333FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7334/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7335FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7336/* Opcode 0xf3 0x0f 0xe0 - invalid */
7337/* Opcode 0xf2 0x0f 0xe0 - invalid */
7338
7339/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7340FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7341/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7342FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7343/* Opcode 0xf3 0x0f 0xe1 - invalid */
7344/* Opcode 0xf2 0x0f 0xe1 - invalid */
7345
7346/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7347FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7348/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7349FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7350/* Opcode 0xf3 0x0f 0xe2 - invalid */
7351/* Opcode 0xf2 0x0f 0xe2 - invalid */
7352
7353/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7354FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7355/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7356FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7357/* Opcode 0xf3 0x0f 0xe3 - invalid */
7358/* Opcode 0xf2 0x0f 0xe3 - invalid */
7359
7360/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7361FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7362/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7363FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7364/* Opcode 0xf3 0x0f 0xe4 - invalid */
7365/* Opcode 0xf2 0x0f 0xe4 - invalid */
7366
7367/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7368FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7369/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7370FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7371/* Opcode 0xf3 0x0f 0xe5 - invalid */
7372/* Opcode 0xf2 0x0f 0xe5 - invalid */
7373
7374/* Opcode 0x0f 0xe6 - invalid */
7375/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7376FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7377/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7378FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7379/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7380FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7381
7382
7383/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7384FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7385{
7386 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
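    /* MOVNTQ is a non-temporal (write-combining hint) store of an MMX register;
       the hint has no architectural effect, so emulating it as a plain 64-bit
       store is correct.  Register destinations are invalid. */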
7387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7388 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7389 {
7390 /* Register, memory. */
7391 IEM_MC_BEGIN(0, 2);
7392 IEM_MC_LOCAL(uint64_t, uSrc);
7393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7394
7395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7397 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7398 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7399
7400 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7401 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7402
7403 IEM_MC_ADVANCE_RIP();
7404 IEM_MC_END();
7405 return VINF_SUCCESS;
7406 }
7407 /* The register, register encoding is invalid. */
7408 return IEMOP_RAISE_INVALID_OPCODE();
7409}
7410
7411/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7412FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7413{
7414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7415 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7416 {
7417 /* Register, memory. */
7418 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
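        /* MOVNTDQ stores a full XMM register non-temporally; the memory operand
           must be 16-byte aligned, hence the aligned SSE store below. */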
7419 IEM_MC_BEGIN(0, 2);
7420 IEM_MC_LOCAL(uint128_t, uSrc);
7421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7422
7423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7425 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7426 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7427
7428 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7429 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7430
7431 IEM_MC_ADVANCE_RIP();
7432 IEM_MC_END();
7433 return VINF_SUCCESS;
7434 }
7435
7436 /* The register, register encoding is invalid. */
7437 return IEMOP_RAISE_INVALID_OPCODE();
7438}
7439
7440/* Opcode 0xf3 0x0f 0xe7 - invalid */
7441/* Opcode 0xf2 0x0f 0xe7 - invalid */
7442
7443
7444/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7445FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7446/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7447FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7448/* Opcode 0xf3 0x0f 0xe8 - invalid */
7449/* Opcode 0xf2 0x0f 0xe8 - invalid */
7450
7451/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7452FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7453/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7454FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7455/* Opcode 0xf3 0x0f 0xe9 - invalid */
7456/* Opcode 0xf2 0x0f 0xe9 - invalid */
7457
7458/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7459FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7460/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7461FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7462/* Opcode 0xf3 0x0f 0xea - invalid */
7463/* Opcode 0xf2 0x0f 0xea - invalid */
7464
7465/** Opcode 0x0f 0xeb - por Pq, Qq */
7466FNIEMOP_STUB(iemOp_por_Pq_Qq);
7467/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7468FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7469/* Opcode 0xf3 0x0f 0xeb - invalid */
7470/* Opcode 0xf2 0x0f 0xeb - invalid */
7471
7472/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7473FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7474/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7475FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7476/* Opcode 0xf3 0x0f 0xec - invalid */
7477/* Opcode 0xf2 0x0f 0xec - invalid */
7478
7479/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7480FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7481/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7482FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7483/* Opcode 0xf3 0x0f 0xed - invalid */
7484/* Opcode 0xf2 0x0f 0xed - invalid */
7485
7486/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7487FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7488/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7489FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7490/* Opcode 0xf3 0x0f 0xee - invalid */
7491/* Opcode 0xf2 0x0f 0xee - invalid */
7492
7493
7494/** Opcode 0x0f 0xef - pxor Pq, Qq */
7495FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7496{
7497 IEMOP_MNEMONIC(pxor, "pxor");
7498 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7499}
7500
7501/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7502FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7503{
7504 IEMOP_MNEMONIC(vpxor, "vpxor");
7505 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7506}
7507
7508/* Opcode 0xf3 0x0f 0xef - invalid */
7509/* Opcode 0xf2 0x0f 0xef - invalid */
7510
7511/* Opcode 0x0f 0xf0 - invalid */
7512/* Opcode 0x66 0x0f 0xf0 - invalid */
7513/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7514FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7515
7516/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7517FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7518/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7519FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7520/* Opcode 0xf2 0x0f 0xf1 - invalid */
7521
7522/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7523FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7524/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7525FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7526/* Opcode 0xf2 0x0f 0xf2 - invalid */
7527
7528/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7529FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7530/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7531FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7532/* Opcode 0xf2 0x0f 0xf3 - invalid */
7533
7534/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7535FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7536/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7537FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7538/* Opcode 0xf2 0x0f 0xf4 - invalid */
7539
7540/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7541FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7542/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7543FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7544/* Opcode 0xf2 0x0f 0xf5 - invalid */
7545
7546/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7547FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7548/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7549FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7550/* Opcode 0xf2 0x0f 0xf6 - invalid */
7551
7552/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7553FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7554/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7555FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7556/* Opcode 0xf2 0x0f 0xf7 - invalid */
7557
7558/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7559FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7560/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7561FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7562/* Opcode 0xf2 0x0f 0xf8 - invalid */
7563
7564/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7565FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7566/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7567FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7568/* Opcode 0xf2 0x0f 0xf9 - invalid */
7569
7570/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7571FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7572/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7573FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7574/* Opcode 0xf2 0x0f 0xfa - invalid */
7575
7576/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7577FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7578/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7579FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7580/* Opcode 0xf2 0x0f 0xfb - invalid */
7581
7582/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7583FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7584/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7585FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7586/* Opcode 0xf2 0x0f 0xfc - invalid */
7587
7588/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7589FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7590/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7591FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7592/* Opcode 0xf2 0x0f 0xfd - invalid */
7593
7594/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7595FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7596/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7597FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7598/* Opcode 0xf2 0x0f 0xfe - invalid */
7599
7600
7601/** Opcode **** 0x0f 0xff - UD0 */
7602FNIEMOP_DEF(iemOp_ud0)
7603{
7604 IEMOP_MNEMONIC(ud0, "ud0");
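    /* Intel documents UD0 as taking a ModR/M byte, so on Intel CPUs we decode
       it (and the effective address) before raising #UD; other vendors treat
       the opcode as invalid without consuming any further bytes. */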
7605 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7606 {
7607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7608#ifndef TST_IEM_CHECK_MC
7609 RTGCPTR GCPtrEff;
7610 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7611 if (rcStrict != VINF_SUCCESS)
7612 return rcStrict;
7613#endif
7614 IEMOP_HLP_DONE_DECODING();
7615 }
7616 return IEMOP_RAISE_INVALID_OPCODE();
7617}
7618
7619
7620
7621/**
7622 * Two byte opcode map, first byte 0x0f.
7623 *
7624 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7625 * check if it needs updating as well when making changes.
7626 */
7627IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7628{
7629 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7630 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7631 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7632 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7633 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7634 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7635 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7636 /* 0x06 */ IEMOP_X4(iemOp_clts),
7637 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7638 /* 0x08 */ IEMOP_X4(iemOp_invd),
7639 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7640 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7641 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7642 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7643 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7644 /* 0x0e */ IEMOP_X4(iemOp_femms),
7645 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7646
7647 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7648 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7649 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7650 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7651 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7652 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7653 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7654 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7655 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7656 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7657 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7658 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7659 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7660 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7661 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7662 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7663
7664 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7665 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7666 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7667 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7668 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7669 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7670 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7671 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7672 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7673 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7674 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7675 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7676 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7677 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7678 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7679 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7680
7681 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7682 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7683 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7684 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7685 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7686 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7687 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7688 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7689 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7690 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7691 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7692 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7693 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7694 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7695 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7696 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7697
7698 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7699 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7700 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7701 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7702 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7703 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7704 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7705 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7706 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7707 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7708 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7709 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7710 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7711 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7712 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7713 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7714
7715 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7716 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7717 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7718 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7719 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7720 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7721 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7722 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7723 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7724 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7725 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7726 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7727 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7728 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7729 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7730 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7731
7732 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7733 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7743 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7744 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7745 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7746 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7747 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7748
7749 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7750 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
7751 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
7752 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
7753 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7754 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7755 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7756 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7757
7758 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7759 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7760 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7761 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7762 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7763 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7764 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7765 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7766
7767 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
7768 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
7769 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
7770 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
7771 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
7772 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
7773 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
7774 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
7775 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
7776 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
7777 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
7778 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
7779 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
7780 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
7781 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
7782 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
7783
7784 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
7785 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
7786 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
7787 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
7788 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
7789 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
7790 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
7791 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
7792 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
7793 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
7794 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
7795 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
7796 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
7797 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
7798 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
7799 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
7800
7801 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
7802 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
7803 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
7804 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
7805 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
7806 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
7807 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7808 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7809 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
7810 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
7811 /* 0xaa */ IEMOP_X4(iemOp_rsm),
7812 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
7813 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
7814 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
7815 /* 0xae */ IEMOP_X4(iemOp_Grp15),
7816 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
7817
7818 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
7819 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
7820 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
7821 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
7822 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
7823 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
7824 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
7825 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
7826 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
7827 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
7828 /* 0xba */ IEMOP_X4(iemOp_Grp8),
7829 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
7830 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
7831 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
7832 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
7833 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
7834
7835 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
7836 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
7837 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7838 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7839 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7840 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7841 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7842 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
7843 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
7844 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
7845 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
7846 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
7847 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
7848 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
7849 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
7850 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
7851
7852 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7853 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7854 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7855 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7856 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7857 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7858 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
7859 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7860 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7861 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7862 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7863 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7864 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7865 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7866 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7867 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7868
7869 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7870 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7871 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7872 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7873 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7874 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7875 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7876 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7877 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7878 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7879 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7880 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7881 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7882 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7883 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7884 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7885
7886 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7887 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7888 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7889 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7890 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7891 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7892 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7893 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7894 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7895 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7896 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7897 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7898 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7899 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7900 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7901 /* 0xff */ IEMOP_X4(iemOp_ud0),
7902};
7903AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
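/* Presumably the table is indexed as (opcode byte * 4) + prefix index, with
   prefix indexes 0/1/2/3 for none/0x66/0xf3/0xf2 as per the column comments
   above; 256 opcodes * 4 prefix variants = the 1024 entries asserted here. */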
7904
7905
7906/**
7907 * VEX opcode map \#1.
7908 *
7909 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
7910 *          it needs updating too when making changes.
7911 */
7912IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
7913{
7914 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7915 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7916 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7917 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7918 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7919 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7920 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7921 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7922 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7923 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7924 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7925 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7926 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
7927 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7928 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7929 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7930 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7931
7932 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7933 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7934 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7935 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7936 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7937 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7938 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7939 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7940 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7941 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7942 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7943 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7944 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7945 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7946 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7947 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7948
7949 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7950 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7951 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7952 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7953 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7954 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7955 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7956 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7957 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7958 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7959 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7960 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7961 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7962 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7963 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7964 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7965
7966 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7967 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7968 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7969 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7970 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7971 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7972 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7973 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7974 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7975 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7976 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7977 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7978 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7979 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7980 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7981 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7982
7983 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7984 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7985 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7986 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7987 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7988 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7989 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7990 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7991 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7992 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7993 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7994 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7995 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7996 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7997 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7998 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7999
8000 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8001 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8002 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8003 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8004 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8005 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8006 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8007 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8008 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8009 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8010 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8011 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8012 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8013 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8014 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8015 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8016
8017 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8020 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8025 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8026 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8027 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8028 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8029 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8032 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8033
8034 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8035 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8036 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8037 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8038 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8039 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8040 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8041 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8042 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8043 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8044 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8045 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8046 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8047 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8048 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8049 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8050
8051 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8052 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8053 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8054 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8055 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8056 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8057 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8058 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8059 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8060 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8061 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8062 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8063 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8064 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8065 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8066 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x90 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */  IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
    /* 0xaf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  iemOp_InvalidNeedRM,        iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_InvalidNeedRM,        iemOp_vpextrw_Gd_Udq_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM,       iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */  iemOp_InvalidNeedRM,        iemOp_vpsrlw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_InvalidNeedRM,        iemOp_vpsrld_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_InvalidNeedRM,        iemOp_vpsrlq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_InvalidNeedRM,        iemOp_vpaddq_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_InvalidNeedRM,        iemOp_vpmullw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_vmovq_Wq_Vq,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd7 */  iemOp_InvalidNeedRM,        iemOp_vpmovmskb_Gd_Ux,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_InvalidNeedRM,        iemOp_vpsubusb_Vx_Hx_W,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_InvalidNeedRM,        iemOp_vpsubusw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_InvalidNeedRM,        iemOp_vpminub_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_InvalidNeedRM,        iemOp_vpand_Vx_Hx_W,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,        iemOp_vpaddusb_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,        iemOp_vpaddusw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,        iemOp_vpmaxub_Vx_Hx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,        iemOp_vpandn_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_InvalidNeedRM,        iemOp_vpavgb_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_InvalidNeedRM,        iemOp_vpsraw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_InvalidNeedRM,        iemOp_vpsrad_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_InvalidNeedRM,        iemOp_vpavgw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_InvalidNeedRM,        iemOp_vpmulhuw_Vx_Hx_W,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_InvalidNeedRM,        iemOp_vpmulhw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_vcvttpd2dq_Vx_Wpd,    iemOp_vcvtdq2pd_Vx_Wpd,     iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_InvalidNeedRM,        iemOp_vmovntdq_Mx_Vx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_InvalidNeedRM,        iemOp_vpsubsb_Vx_Hx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_InvalidNeedRM,        iemOp_vpsubsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_InvalidNeedRM,        iemOp_vpminsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_InvalidNeedRM,        iemOp_vpor_Vx_Hx_W,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_InvalidNeedRM,        iemOp_vpaddsb_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_InvalidNeedRM,        iemOp_vpaddsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_InvalidNeedRM,        iemOp_vpmaxsw_Vx_Hx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_InvalidNeedRM,        iemOp_vpxor_Vx_Hx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */  iemOp_InvalidNeedRM,        iemOp_vpsllw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_InvalidNeedRM,        iemOp_vpslld_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_InvalidNeedRM,        iemOp_vpsllq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_InvalidNeedRM,        iemOp_vpmuludq_Vx_Hx_W,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_InvalidNeedRM,        iemOp_vpmaddwd_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_InvalidNeedRM,        iemOp_vpsadbw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_InvalidNeedRM,        iemOp_vmaskmovdqu_Vdq_Udq,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_InvalidNeedRM,        iemOp_vpsubb_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_InvalidNeedRM,        iemOp_vpsubw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_InvalidNeedRM,        iemOp_vpsubd_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_InvalidNeedRM,        iemOp_vpsubq_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_InvalidNeedRM,        iemOp_vpaddb_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_InvalidNeedRM,        iemOp_vpaddw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_InvalidNeedRM,        iemOp_vpaddd_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
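
/*
 * Illustrative sketch only (not part of the build): how a prefix-indexed
 * table like g_apfnTwoByteMap is consulted.  Each opcode byte owns four
 * consecutive entries, one per mandatory-prefix column -- none, 0x66, 0xF3
 * and 0xF2 -- which is why the table holds 256 * 4 = 1024 pointers (see the
 * AssertCompile above).  The helper name and the prefix-precedence order
 * below are assumptions made for the example; only g_apfnTwoByteMap,
 * DECLINLINE and the IEM_OP_PRF_* flags come from the surrounding code base.
 */
#if 0 /* example only */
DECLINLINE(PFNIEMOP) iemExampleSelectTwoByteHandler(PVMCPU pVCpu, uint8_t bOpcode)
{
    uintptr_t idxPrefix = 0;                                /* column 0: no mandatory prefix */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)           /* 0xf3 -> column 2 */
        idxPrefix = 2;
    else if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)     /* 0xf2 -> column 3 */
        idxPrefix = 3;
    else if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP)   /* 0x66 -> column 1 */
        idxPrefix = 1;
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif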
8188/** @} */
8189
8190