VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66314

Last change on this file: r66314, checked in by vboxsync, 8 years ago

IEM: Use RTUINT128U instead of uint128_t; started on movsldup Vdq,Wdq.

1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66314 2017-03-28 21:28:34Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
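 /* ModR/M: mod == 3 selects the register form; any other mod value means a memory operand. */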
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Opcode 0x0f 0x00 /4 and /5, common worker for verr/verw. */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
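/* Entries /6 and /7 are undefined in group 6 and route to the invalid-with-ModR/M handler. */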
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
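 /* The reg field (bits 5:3) of the ModR/M byte selects the group 6 member. */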
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
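 /* Without XSAVE/XRSTOR in the guest CPUID, xgetbv decodes to #UD. */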
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
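 /* In long mode the descriptor-table operand is always 2+8 bytes, so the effective operand size is forced to 64-bit. */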
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441#ifdef VBOX_WITH_NESTED_HWVIRT
442/** Opcode 0x0f 0x01 0xd8. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
444{
445 IEMOP_MNEMONIC(vmrun, "vmrun");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
447}
448
449/** Opcode 0x0f 0x01 0xd9. */
450FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
451{
452 IEMOP_MNEMONIC(vmmcall, "vmmcall");
453 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
454}
455
456
457/** Opcode 0x0f 0x01 0xda. */
458FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
459{
460 IEMOP_MNEMONIC(vmload, "vmload");
461 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
462}
463
464
465/** Opcode 0x0f 0x01 0xdb. */
466FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
467{
468 IEMOP_MNEMONIC(vmsave, "vmsave");
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
470}
471
472
473/** Opcode 0x0f 0x01 0xdc. */
474FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
475{
476 IEMOP_MNEMONIC(stgi, "stgi");
477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
478}
479
480
481/** Opcode 0x0f 0x01 0xdd. */
482FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
483{
484 IEMOP_MNEMONIC(clgi, "clgi");
485 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
486}
487
488
489/** Opcode 0x0f 0x01 0xdf. */
490FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
491{
492 IEMOP_MNEMONIC(invlpga, "invlpga");
493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
494}
495#else
496/** Opcode 0x0f 0x01 0xd8. */
497FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
498
499/** Opcode 0x0f 0x01 0xd9. */
500FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
501
502/** Opcode 0x0f 0x01 0xda. */
503FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
504
505/** Opcode 0x0f 0x01 0xdb. */
506FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
507
508/** Opcode 0x0f 0x01 0xdc. */
509FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
510
511/** Opcode 0x0f 0x01 0xdd. */
512FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
513
514/** Opcode 0x0f 0x01 0xdf. */
515FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
516#endif /* VBOX_WITH_NESTED_HWVIRT */
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520
521/** Opcode 0x0f 0x01 /4. */
522FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
523{
524 IEMOP_MNEMONIC(smsw, "smsw");
525 IEMOP_HLP_MIN_286();
526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
527 {
528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
529 switch (pVCpu->iem.s.enmEffOpSize)
530 {
531 case IEMMODE_16BIT:
532 IEM_MC_BEGIN(0, 1);
533 IEM_MC_LOCAL(uint16_t, u16Tmp);
534 IEM_MC_FETCH_CR0_U16(u16Tmp);
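 /* Model of ancient behaviour: the 286 reads ones in MSW bits 4..15 (0xfff0), the 386 in bits 5..15 (0xffe0, bit 4 being ET). */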
535 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
536 { /* likely */ }
537 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
538 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
539 else
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
541 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
542 IEM_MC_ADVANCE_RIP();
543 IEM_MC_END();
544 return VINF_SUCCESS;
545
546 case IEMMODE_32BIT:
547 IEM_MC_BEGIN(0, 1);
548 IEM_MC_LOCAL(uint32_t, u32Tmp);
549 IEM_MC_FETCH_CR0_U32(u32Tmp);
550 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
551 IEM_MC_ADVANCE_RIP();
552 IEM_MC_END();
553 return VINF_SUCCESS;
554
555 case IEMMODE_64BIT:
556 IEM_MC_BEGIN(0, 1);
557 IEM_MC_LOCAL(uint64_t, u64Tmp);
558 IEM_MC_FETCH_CR0_U64(u64Tmp);
559 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
560 IEM_MC_ADVANCE_RIP();
561 IEM_MC_END();
562 return VINF_SUCCESS;
563
564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
565 }
566 }
567 else
568 {
569 /* Ignore operand size here, memory refs are always 16-bit. */
570 IEM_MC_BEGIN(0, 2);
571 IEM_MC_LOCAL(uint16_t, u16Tmp);
572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
575 IEM_MC_FETCH_CR0_U16(u16Tmp);
576 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
577 { /* likely */ }
578 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
579 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
580 else
581 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
582 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
583 IEM_MC_ADVANCE_RIP();
584 IEM_MC_END();
585 return VINF_SUCCESS;
586 }
587}
588
589
590/** Opcode 0x0f 0x01 /6. */
591FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
592{
593 /* The operand size is effectively ignored, all is 16-bit and only the
594 lower 4 bits are used. */
595 IEMOP_MNEMONIC(lmsw, "lmsw");
596 IEMOP_HLP_MIN_286();
597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
598 {
599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
600 IEM_MC_BEGIN(1, 0);
601 IEM_MC_ARG(uint16_t, u16Tmp, 0);
602 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
603 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
604 IEM_MC_END();
605 }
606 else
607 {
608 IEM_MC_BEGIN(1, 1);
609 IEM_MC_ARG(uint16_t, u16Tmp, 0);
610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
613 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
614 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
615 IEM_MC_END();
616 }
617 return VINF_SUCCESS;
618}
619
620
621/** Opcode 0x0f 0x01 /7. */
622FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
623{
624 IEMOP_MNEMONIC(invlpg, "invlpg");
625 IEMOP_HLP_MIN_486();
626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
627 IEM_MC_BEGIN(1, 1);
628 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
630 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
631 IEM_MC_END();
632 return VINF_SUCCESS;
633}
634
635
636/** Opcode 0x0f 0x01 /7. */
637FNIEMOP_DEF(iemOp_Grp7_swapgs)
638{
639 IEMOP_MNEMONIC(swapgs, "swapgs");
640 IEMOP_HLP_ONLY_64BIT();
641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
642 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
643}
644
645
646/** Opcode 0x0f 0x01 /7. */
647FNIEMOP_DEF(iemOp_Grp7_rdtscp)
648{
649 NOREF(pVCpu);
650 IEMOP_BITCH_ABOUT_STUB();
651 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
652}
653
654
655/**
656 * Group 7 jump table, memory variant.
657 */
658IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
659{
660 iemOp_Grp7_sgdt,
661 iemOp_Grp7_sidt,
662 iemOp_Grp7_lgdt,
663 iemOp_Grp7_lidt,
664 iemOp_Grp7_smsw,
665 iemOp_InvalidWithRM,
666 iemOp_Grp7_lmsw,
667 iemOp_Grp7_invlpg
668};
669
670
671/** Opcode 0x0f 0x01. */
672FNIEMOP_DEF(iemOp_Grp7)
673{
674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
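 /* Memory forms dispatch on the reg field alone; register forms are decoded further on the r/m field below. */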
675 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
676 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
677
678 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
679 {
680 case 0:
681 switch (bRm & X86_MODRM_RM_MASK)
682 {
683 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
684 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
685 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
686 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
687 }
688 return IEMOP_RAISE_INVALID_OPCODE();
689
690 case 1:
691 switch (bRm & X86_MODRM_RM_MASK)
692 {
693 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
694 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
695 }
696 return IEMOP_RAISE_INVALID_OPCODE();
697
698 case 2:
699 switch (bRm & X86_MODRM_RM_MASK)
700 {
701 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
702 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
703 }
704 return IEMOP_RAISE_INVALID_OPCODE();
705
706 case 3:
707 switch (bRm & X86_MODRM_RM_MASK)
708 {
709 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
710 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
711 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
712 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
713 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
714 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
715 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
716 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
718 }
719
720 case 4:
721 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
722
723 case 5:
724 return IEMOP_RAISE_INVALID_OPCODE();
725
726 case 6:
727 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
728
729 case 7:
730 switch (bRm & X86_MODRM_RM_MASK)
731 {
732 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
733 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
734 }
735 return IEMOP_RAISE_INVALID_OPCODE();
736
737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
738 }
739}
740
741/** Opcode 0x0f 0x02 and 0x0f 0x03, common worker for lar/lsl. */
742FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
743{
744 IEMOP_HLP_NO_REAL_OR_V86_MODE();
745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
746
747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
748 {
749 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
750 switch (pVCpu->iem.s.enmEffOpSize)
751 {
752 case IEMMODE_16BIT:
753 {
754 IEM_MC_BEGIN(3, 0);
755 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
756 IEM_MC_ARG(uint16_t, u16Sel, 1);
757 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
758
759 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
760 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
761 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
762
763 IEM_MC_END();
764 return VINF_SUCCESS;
765 }
766
767 case IEMMODE_32BIT:
768 case IEMMODE_64BIT:
769 {
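 /* 32-bit and 64-bit operands share this path; the CIMPL worker is presumably responsible for 32-bit truncation. */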
770 IEM_MC_BEGIN(3, 0);
771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
772 IEM_MC_ARG(uint16_t, u16Sel, 1);
773 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
774
775 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
776 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
777 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
778
779 IEM_MC_END();
780 return VINF_SUCCESS;
781 }
782
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785 }
786 else
787 {
788 switch (pVCpu->iem.s.enmEffOpSize)
789 {
790 case IEMMODE_16BIT:
791 {
792 IEM_MC_BEGIN(3, 1);
793 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
794 IEM_MC_ARG(uint16_t, u16Sel, 1);
795 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797
798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
799 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
800
801 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
802 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
804
805 IEM_MC_END();
806 return VINF_SUCCESS;
807 }
808
809 case IEMMODE_32BIT:
810 case IEMMODE_64BIT:
811 {
812 IEM_MC_BEGIN(3, 1);
813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
814 IEM_MC_ARG(uint16_t, u16Sel, 1);
815 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
817
818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
819 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
820/** @todo testcase: make sure it's a 16-bit read. */
821
822 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
823 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
824 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
825
826 IEM_MC_END();
827 return VINF_SUCCESS;
828 }
829
830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
831 }
832 }
833}
834
835
836
837/** Opcode 0x0f 0x02. */
838FNIEMOP_DEF(iemOp_lar_Gv_Ew)
839{
840 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
842}
843
844
845/** Opcode 0x0f 0x03. */
846FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
847{
848 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
849 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
850}
851
852
853/** Opcode 0x0f 0x05. */
854FNIEMOP_DEF(iemOp_syscall)
855{
856 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
859}
860
861
862/** Opcode 0x0f 0x06. */
863FNIEMOP_DEF(iemOp_clts)
864{
865 IEMOP_MNEMONIC(clts, "clts");
866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
867 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
868}
869
870
871/** Opcode 0x0f 0x07. */
872FNIEMOP_DEF(iemOp_sysret)
873{
874 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
876 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
877}
878
879
880/** Opcode 0x0f 0x08. */
881FNIEMOP_STUB(iemOp_invd);
882// IEMOP_HLP_MIN_486();
883
884
885/** Opcode 0x0f 0x09. */
886FNIEMOP_DEF(iemOp_wbinvd)
887{
888 IEMOP_MNEMONIC(wbinvd, "wbinvd");
889 IEMOP_HLP_MIN_486();
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
891 IEM_MC_BEGIN(0, 0);
892 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
893 IEM_MC_ADVANCE_RIP();
894 IEM_MC_END();
895 return VINF_SUCCESS; /* ignore for now */
896}
897
898
899/** Opcode 0x0f 0x0b. */
900FNIEMOP_DEF(iemOp_ud2)
901{
902 IEMOP_MNEMONIC(ud2, "ud2");
903 return IEMOP_RAISE_INVALID_OPCODE();
904}
905
906/** Opcode 0x0f 0x0d. */
907FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
908{
909 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
911 {
912 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
913 return IEMOP_RAISE_INVALID_OPCODE();
914 }
915
916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
918 {
919 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
920 return IEMOP_RAISE_INVALID_OPCODE();
921 }
922
923 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
924 {
925 case 2: /* Aliased to /0 for the time being. */
926 case 4: /* Aliased to /0 for the time being. */
927 case 5: /* Aliased to /0 for the time being. */
928 case 6: /* Aliased to /0 for the time being. */
929 case 7: /* Aliased to /0 for the time being. */
930 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
931 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
932 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
934 }
935
936 IEM_MC_BEGIN(0, 1);
937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
940 /* Currently a NOP. */
941 NOREF(GCPtrEffSrc);
942 IEM_MC_ADVANCE_RIP();
943 IEM_MC_END();
944 return VINF_SUCCESS;
945}
946
947
948/** Opcode 0x0f 0x0e. */
949FNIEMOP_STUB(iemOp_femms);
950
951
952/** Opcode 0x0f 0x0f 0x0c. */
953FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
954
955/** Opcode 0x0f 0x0f 0x0d. */
956FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
957
958/** Opcode 0x0f 0x0f 0x1c. */
959FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
960
961/** Opcode 0x0f 0x0f 0x1d. */
962FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
963
964/** Opcode 0x0f 0x0f 0x8a. */
965FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
966
967/** Opcode 0x0f 0x0f 0x8e. */
968FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
969
970/** Opcode 0x0f 0x0f 0x90. */
971FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
972
973/** Opcode 0x0f 0x0f 0x94. */
974FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
975
976/** Opcode 0x0f 0x0f 0x96. */
977FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
978
979/** Opcode 0x0f 0x0f 0x97. */
980FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
981
982/** Opcode 0x0f 0x0f 0x9a. */
983FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
984
985/** Opcode 0x0f 0x0f 0x9e. */
986FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
987
988/** Opcode 0x0f 0x0f 0xa0. */
989FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
990
991/** Opcode 0x0f 0x0f 0xa4. */
992FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
993
994/** Opcode 0x0f 0x0f 0xa6. */
995FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
996
997/** Opcode 0x0f 0x0f 0xa7. */
998FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
999
1000/** Opcode 0x0f 0x0f 0xaa. */
1001FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1002
1003/** Opcode 0x0f 0x0f 0xae. */
1004FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1005
1006/** Opcode 0x0f 0x0f 0xb0. */
1007FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1008
1009/** Opcode 0x0f 0x0f 0xb4. */
1010FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1011
1012/** Opcode 0x0f 0x0f 0xb6. */
1013FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1014
1015/** Opcode 0x0f 0x0f 0xb7. */
1016FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1017
1018/** Opcode 0x0f 0x0f 0xbb. */
1019FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1020
1021/** Opcode 0x0f 0x0f 0xbf. */
1022FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1023
1024
1025/** Opcode 0x0f 0x0f. */
1026FNIEMOP_DEF(iemOp_3Dnow)
1027{
1028 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1029 {
1030 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1031 return IEMOP_RAISE_INVALID_OPCODE();
1032 }
1033
1034 /* This is pretty sparse, use switch instead of table. */
1035 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
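 /* The 3DNow! operation is selected by a trailing immediate opcode byte; it is fetched here up front since all handlers are still stubs. */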
1036 switch (b)
1037 {
1038 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1039 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1040 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1041 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1042 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1043 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1044 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1045 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1046 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1047 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1048 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1049 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1050 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1051 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1052 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1053 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1054 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1055 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1056 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1057 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1058 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1059 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1060 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1061 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1062 default:
1063 return IEMOP_RAISE_INVALID_OPCODE();
1064 }
1065}
1066
1067
1068/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1069FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1070/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1071FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1072/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1073FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1074/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1075FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1076
1077
1078/**
1079 * @opcode 0x11
1080 * @oppfx none
1081 * @opcpuid sse
1082 * @opgroup og_sse_simdfp_datamove
1083 * @opxcpttype 4UA
1084 * @optest op1=1 op2=2 -> op1=2
1085 * @optest op1=0 op2=-42 -> op1=-42
1086 */
1087FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1088{
1089 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1092 {
1093 /*
1094 * Register, register.
1095 */
1096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1097 IEM_MC_BEGIN(0, 0);
1098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1100 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1101 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1102 IEM_MC_ADVANCE_RIP();
1103 IEM_MC_END();
1104 }
1105 else
1106 {
1107 /*
1108 * Memory, register.
1109 */
1110 IEM_MC_BEGIN(0, 2);
1111 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1113
1114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1117 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1118
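 /* Plain (unaligned) 128-bit store: movups never raises #GP on a misaligned address, unlike movaps. */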
1119 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1120 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 return VINF_SUCCESS;
1126}
1127
1128
1129/**
1130 * @opcode 0x11
1131 * @oppfx 0x66
1132 * @opcpuid sse2
1133 * @opgroup og_sse2_pcksclr_datamove
1134 * @opxcpttype 4UA
1135 * @optest op1=1 op2=2 -> op1=2
1136 * @optest op1=0 op2=-42 -> op1=-42
1137 */
1138FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1139{
1140 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1143 {
1144 /*
1145 * Register, register.
1146 */
1147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1148 IEM_MC_BEGIN(0, 0);
1149 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1151 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1152 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1153 IEM_MC_ADVANCE_RIP();
1154 IEM_MC_END();
1155 }
1156 else
1157 {
1158 /*
1159 * Memory, register.
1160 */
1161 IEM_MC_BEGIN(0, 2);
1162 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1164
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1169
1170 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1172
1173 IEM_MC_ADVANCE_RIP();
1174 IEM_MC_END();
1175 }
1176 return VINF_SUCCESS;
1177}
1178
1179
1180/**
1181 * @opcode 0x11
1182 * @oppfx 0xf3
1183 * @opcpuid sse
1184 * @opgroup og_sse_simdfp_datamove
1185 * @opxcpttype 5
1186 * @optest op1=1 op2=2 -> op1=2
1187 * @optest op1=0 op2=-22 -> op1=-22
1188 */
1189FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
1190{
1191 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1194 {
1195 /*
1196 * Register, register.
1197 */
1198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1199 IEM_MC_BEGIN(0, 1);
1200 IEM_MC_LOCAL(uint32_t, uSrc);
1201
1202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1204 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1205 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1206
1207 IEM_MC_ADVANCE_RIP();
1208 IEM_MC_END();
1209 }
1210 else
1211 {
1212 /*
1213 * Memory, register.
1214 */
1215 IEM_MC_BEGIN(0, 2);
1216 IEM_MC_LOCAL(uint32_t, uSrc);
1217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1218
1219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1221 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1222 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1223
1224 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1225 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1226
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 return VINF_SUCCESS;
1231}
1232
1233
1234/**
1235 * @opcode 0x11
1236 * @oppfx 0xf2
1237 * @opcpuid sse2
1238 * @opgroup og_sse2_pcksclr_datamove
1239 * @opxcpttype 5
1240 * @optest op1=1 op2=2 -> op1=2
1241 * @optest op1=0 op2=-42 -> op1=-42
1242 */
1243FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1244{
1245 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1248 {
1249 /*
1250 * Register, register.
1251 */
1252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1253 IEM_MC_BEGIN(0, 1);
1254 IEM_MC_LOCAL(uint64_t, uSrc);
1255
1256 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1258 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1259 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1260
1261 IEM_MC_ADVANCE_RIP();
1262 IEM_MC_END();
1263 }
1264 else
1265 {
1266 /*
1267 * Memory, register.
1268 */
1269 IEM_MC_BEGIN(0, 2);
1270 IEM_MC_LOCAL(uint64_t, uSrc);
1271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1272
1273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1277
1278 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1279 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1280
1281 IEM_MC_ADVANCE_RIP();
1282 IEM_MC_END();
1283 }
1284 return VINF_SUCCESS;
1285}
1286
1287
1288FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1289{
1290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1292 {
1293 /**
1294 * @opcode 0x12
1295 * @opcodesub 11 mr/reg
1296 * @oppfx none
1297 * @opcpuid sse
1298 * @opgroup og_sse_simdfp_datamove
1299 * @opxcpttype 5
1300 * @optest op1=1 op2=2 -> op1=2
1301 * @optest op1=0 op2=-42 -> op1=-42
1302 */
1303 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1304
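 /* movhlps copies the high qword of the source register into the low qword of the destination. */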
1305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1306 IEM_MC_BEGIN(0, 1);
1307 IEM_MC_LOCAL(uint64_t, uSrc);
1308
1309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1310 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1311 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1312 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1313
1314 IEM_MC_ADVANCE_RIP();
1315 IEM_MC_END();
1316 }
1317 else
1318 {
1319 /**
1320 * @opdone
1321 * @opcode 0x12
1322 * @opcodesub !11 mr/reg
1323 * @oppfx none
1324 * @opcpuid sse
1325 * @opgroup og_sse_simdfp_datamove
1326 * @opxcpttype 5
1327 * @optest op1=1 op2=2 -> op1=2
1328 * @optest op1=0 op2=-42 -> op1=-42
1329 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1330 */
1331 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1332
1333 IEM_MC_BEGIN(0, 2);
1334 IEM_MC_LOCAL(uint64_t, uSrc);
1335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1336
1337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1341
1342 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1343 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1344
1345 IEM_MC_ADVANCE_RIP();
1346 IEM_MC_END();
1347 }
1348 return VINF_SUCCESS;
1349}
1350
1351
1352/**
1353 * @opcode 0x12
1354 * @opcodesub !11 mr/reg
1355 * @oppfx 0x66
1356 * @opcpuid sse2
1357 * @opgroup og_sse2_pcksclr_datamove
1358 * @opxcpttype 5
1359 * @optest op1=1 op2=2 -> op1=2
1360 * @optest op1=0 op2=-42 -> op1=-42
1361 */
1362FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1363{
1364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1365 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1366 {
1367 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1368
1369 IEM_MC_BEGIN(0, 2);
1370 IEM_MC_LOCAL(uint64_t, uSrc);
1371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1372
1373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1375 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1376 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1377
1378 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1379 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1380
1381 IEM_MC_ADVANCE_RIP();
1382 IEM_MC_END();
1383 return VINF_SUCCESS;
1384 }
1385
1386 /**
1387 * @opdone
1388 * @opmnemonic ud660f12m3
1389 * @opcode 0x12
1390 * @opcodesub 11 mr/reg
1391 * @oppfx 0x66
1392 * @opunused immediate
1393 * @opcpuid sse
1394 * @optest ->
1395 */
1396 return IEMOP_RAISE_INVALID_OPCODE();
1397}
1398
1399#if 0
1400FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1401#else
1402/**
1403 * @opcode 0x12
1404 * @opcodesub !11 mr/reg
1405 * @oppfx 0xf3
1406 * @opcpuid sse3
1407 * @opgroup og_sse3_pcksclr_datamove
1408 * @opxcpttype 4
1409 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1410 * op1=0x00000002000000020000000100000001
1411 */
1412FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1413{
1414 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
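 /* movsldup duplicates the even dwords of the source: result = { s0, s0, s2, s2 }. */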
1416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1417 {
1418 /*
1419 * Register, register.
1420 */
1421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1422 IEM_MC_BEGIN(2, 0);
1423 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1424 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1425
1426 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1427 IEM_MC_PREPARE_SSE_USAGE();
1428
1429 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1430 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1431 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1432
1433 IEM_MC_ADVANCE_RIP();
1434 IEM_MC_END();
1435 }
1436 else
1437 {
1438 /*
1439 * Register, memory.
1440 */
1441 IEM_MC_BEGIN(2, 2);
1442 IEM_MC_LOCAL(RTUINT128U, uSrc);
1443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1444 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1445 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1446
1447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1449 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1450 IEM_MC_PREPARE_SSE_USAGE();
1451
1452 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1453 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1454 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1455
1456 IEM_MC_ADVANCE_RIP();
1457 IEM_MC_END();
1458 }
1459 return VINF_SUCCESS;
1460
1461}
1462#endif
1463
1464/** Opcode 0xf2 0x0f 0x12. */
1465FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1466
1467/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1468FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1469
1470/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1471FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1472{
1473 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1475 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1476 {
1477#if 0
1478 /*
1479 * Register, register.
1480 */
1481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1482 IEM_MC_BEGIN(0, 1);
1483 IEM_MC_LOCAL(uint64_t, uSrc);
1484 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1486 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1487 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1488 IEM_MC_ADVANCE_RIP();
1489 IEM_MC_END();
1490#else
1491 return IEMOP_RAISE_INVALID_OPCODE();
1492#endif
1493 }
1494 else
1495 {
1496 /*
1497 * Memory, register.
1498 */
1499 IEM_MC_BEGIN(0, 2);
1500 IEM_MC_LOCAL(uint64_t, uSrc);
1501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1502
1503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1507
1508 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1509 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1510
1511 IEM_MC_ADVANCE_RIP();
1512 IEM_MC_END();
1513 }
1514 return VINF_SUCCESS;
1515}
1516
1517/* Opcode 0xf3 0x0f 0x13 - invalid */
1518/* Opcode 0xf2 0x0f 0x13 - invalid */
1519
1520/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
1521FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1522/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1523FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1524/* Opcode 0xf3 0x0f 0x14 - invalid */
1525/* Opcode 0xf2 0x0f 0x14 - invalid */
1526/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1527FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1528/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1529FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1530/* Opcode 0xf3 0x0f 0x15 - invalid */
1531/* Opcode 0xf2 0x0f 0x15 - invalid */
1532/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1533FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1534/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1535FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1536/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1537FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1538/* Opcode 0xf2 0x0f 0x16 - invalid */
1539/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1540FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1541/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1542FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1543/* Opcode 0xf3 0x0f 0x17 - invalid */
1544/* Opcode 0xf2 0x0f 0x17 - invalid */
1545
1546
1547/** Opcode 0x0f 0x18. */
1548FNIEMOP_DEF(iemOp_prefetch_Grp16)
1549{
1550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1551 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1552 {
1553 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1554 {
1555 case 4: /* Aliased to /0 for the time being according to AMD. */
1556 case 5: /* Aliased to /0 for the time being according to AMD. */
1557 case 6: /* Aliased to /0 for the time being according to AMD. */
1558 case 7: /* Aliased to /0 for the time being according to AMD. */
1559 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1560 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1561 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1562 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1564 }
1565
1566 IEM_MC_BEGIN(0, 1);
1567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1570 /* Currently a NOP. */
1571 NOREF(GCPtrEffSrc);
1572 IEM_MC_ADVANCE_RIP();
1573 IEM_MC_END();
1574 return VINF_SUCCESS;
1575 }
1576
1577 return IEMOP_RAISE_INVALID_OPCODE();
1578}
1579
1580
1581/** Opcode 0x0f 0x19..0x1f. */
1582FNIEMOP_DEF(iemOp_nop_Ev)
1583{
1584 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1587 {
1588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1589 IEM_MC_BEGIN(0, 0);
1590 IEM_MC_ADVANCE_RIP();
1591 IEM_MC_END();
1592 }
1593 else
1594 {
1595 IEM_MC_BEGIN(0, 1);
1596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1599 /* Currently a NOP. */
1600 NOREF(GCPtrEffSrc);
1601 IEM_MC_ADVANCE_RIP();
1602 IEM_MC_END();
1603 }
1604 return VINF_SUCCESS;
1605}
1606
1607
1608/** Opcode 0x0f 0x20. */
1609FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1610{
1611 /* mod is ignored, as are operand size overrides. */
1612 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1613 IEMOP_HLP_MIN_386();
1614 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1615 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1616 else
1617 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1618
1619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1620 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1621 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1622 {
1623 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1624 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1625 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1626 iCrReg |= 8;
1627 }
1628 switch (iCrReg)
1629 {
1630 case 0: case 2: case 3: case 4: case 8:
1631 break;
1632 default:
1633 return IEMOP_RAISE_INVALID_OPCODE();
1634 }
1635 IEMOP_HLP_DONE_DECODING();
1636
1637 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1638}
1639
1640
1641/** Opcode 0x0f 0x21. */
1642FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1643{
1644 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1645 IEMOP_HLP_MIN_386();
1646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1649 return IEMOP_RAISE_INVALID_OPCODE();
1650 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1651 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1652 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1653}
1654
1655
1656/** Opcode 0x0f 0x22. */
1657FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1658{
1659 /* mod is ignored, as are operand size overrides. */
1660 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1661 IEMOP_HLP_MIN_386();
1662 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1663 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1664 else
1665 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1666
1667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1668 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1669 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1670 {
1671 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1672 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1673 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1674 iCrReg |= 8;
1675 }
1676 switch (iCrReg)
1677 {
1678 case 0: case 2: case 3: case 4: case 8:
1679 break;
1680 default:
1681 return IEMOP_RAISE_INVALID_OPCODE();
1682 }
1683 IEMOP_HLP_DONE_DECODING();
1684
1685 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1686}
1687
1688
1689/** Opcode 0x0f 0x23. */
1690FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1691{
1692 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1693 IEMOP_HLP_MIN_386();
1694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1696 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1697 return IEMOP_RAISE_INVALID_OPCODE();
1698 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1699 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1700 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1701}
1702
1703
1704/** Opcode 0x0f 0x24. */
1705FNIEMOP_DEF(iemOp_mov_Rd_Td)
1706{
1707 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1708 /** @todo works on 386 and 486. */
1709 /* The RM byte is not considered, see testcase. */
1710 return IEMOP_RAISE_INVALID_OPCODE();
1711}
1712
1713
1714/** Opcode 0x0f 0x26. */
1715FNIEMOP_DEF(iemOp_mov_Td_Rd)
1716{
1717 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1718 /** @todo works on 386 and 486. */
1719 /* The RM byte is not considered, see testcase. */
1720 return IEMOP_RAISE_INVALID_OPCODE();
1721}
1722
1723
1724/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1725FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1726{
1727 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1730 {
1731 /*
1732 * Register, register.
1733 */
1734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1735 IEM_MC_BEGIN(0, 0);
1736 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1737 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1738 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1739 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1740 IEM_MC_ADVANCE_RIP();
1741 IEM_MC_END();
1742 }
1743 else
1744 {
1745 /*
1746 * Register, memory.
1747 */
1748 IEM_MC_BEGIN(0, 2);
1749 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1751
1752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1754 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1755 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1756
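 /* The _ALIGN_SSE fetch enforces movaps' 16-byte alignment requirement (#GP on misalignment). */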
1757 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1758 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1759
1760 IEM_MC_ADVANCE_RIP();
1761 IEM_MC_END();
1762 }
1763 return VINF_SUCCESS;
1764}
1765
1766/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1767FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1768{
1769 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1772 {
1773 /*
1774 * Register, register.
1775 */
1776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1777 IEM_MC_BEGIN(0, 0);
1778 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1780 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1781 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1782 IEM_MC_ADVANCE_RIP();
1783 IEM_MC_END();
1784 }
1785 else
1786 {
1787 /*
1788 * Register, memory.
1789 */
1790 IEM_MC_BEGIN(0, 2);
1791 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1793
1794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1798
1799 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1800 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1801
1802 IEM_MC_ADVANCE_RIP();
1803 IEM_MC_END();
1804 }
1805 return VINF_SUCCESS;
1806}
1807
1808/* Opcode 0xf3 0x0f 0x28 - invalid */
1809/* Opcode 0xf2 0x0f 0x28 - invalid */
1810
1811/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1812FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1813{
1814 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /*
1819 * Register, register.
1820 */
1821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1822 IEM_MC_BEGIN(0, 0);
1823 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1824 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1825 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1826 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1827 IEM_MC_ADVANCE_RIP();
1828 IEM_MC_END();
1829 }
1830 else
1831 {
1832 /*
1833 * Memory, register.
1834 */
1835 IEM_MC_BEGIN(0, 2);
1836 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1838
1839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1841 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1842 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1843
1844 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1845 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1846
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 }
1850 return VINF_SUCCESS;
1851}
1852
1853/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1854FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1855{
1856 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1859 {
1860 /*
1861 * Register, register.
1862 */
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1866 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1867 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1868 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1869 IEM_MC_ADVANCE_RIP();
1870 IEM_MC_END();
1871 }
1872 else
1873 {
1874 /*
1875 * Memory, register.
1876 */
1877 IEM_MC_BEGIN(0, 2);
1878 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1880
1881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1883 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1884 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1885
1886 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1887 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1888
1889 IEM_MC_ADVANCE_RIP();
1890 IEM_MC_END();
1891 }
1892 return VINF_SUCCESS;
1893}
1894
1895/* Opcode 0xf3 0x0f 0x29 - invalid */
1896/* Opcode 0xf2 0x0f 0x29 - invalid */
1897
1898
1899/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1900FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1901/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1902FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1903/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1904FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1905/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1906FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1907
1908
1909/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1910FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1911{
1912 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1914 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1915 {
1916 /*
1917 * Memory, register.
1918 */
1919 IEM_MC_BEGIN(0, 2);
1920 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1922
1923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1925 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1926 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1927
1928 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1929 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1930
1931 IEM_MC_ADVANCE_RIP();
1932 IEM_MC_END();
1933 }
1934 /* The register, register encoding is invalid. */
1935 else
1936 return IEMOP_RAISE_INVALID_OPCODE();
1937 return VINF_SUCCESS;
1938}
1939
1940/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1941FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1942{
1943 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1945 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1946 {
1947 /*
1948 * Memory, register.
1949 */
1950 IEM_MC_BEGIN(0, 2);
1951 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1953
1954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1956 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1957 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1958
1959 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1960 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1961
1962 IEM_MC_ADVANCE_RIP();
1963 IEM_MC_END();
1964 }
1965 /* The register, register encoding is invalid. */
1966 else
1967 return IEMOP_RAISE_INVALID_OPCODE();
1968 return VINF_SUCCESS;
1969}
1970/* Opcode 0xf3 0x0f 0x2b - invalid */
1971/* Opcode 0xf2 0x0f 0x2b - invalid */
1972
1973
1974/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1975FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1976/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1977FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1978/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1979FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1980/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1981FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1982
1983/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1984FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1985/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1986FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1987/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1988FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1989/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1990FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1991
1992/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1993FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1994/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1995FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1996/* Opcode 0xf3 0x0f 0x2e - invalid */
1997/* Opcode 0xf2 0x0f 0x2e - invalid */
1998
1999/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2000FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2001/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2002FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2003/* Opcode 0xf3 0x0f 0x2f - invalid */
2004/* Opcode 0xf2 0x0f 0x2f - invalid */
2005
2006/** Opcode 0x0f 0x30. */
2007FNIEMOP_DEF(iemOp_wrmsr)
2008{
2009 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2012}
2013
2014
2015/** Opcode 0x0f 0x31. */
2016FNIEMOP_DEF(iemOp_rdtsc)
2017{
2018 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2020 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2021}
2022
2023
2024/** Opcode 0x0f 0x32. */
2025FNIEMOP_DEF(iemOp_rdmsr)
2026{
2027 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2030}
2031
2032
2033/** Opcode 0x0f 0x33. */
2034FNIEMOP_STUB(iemOp_rdpmc);
2035/** Opcode 0x0f 0x34. */
2036FNIEMOP_STUB(iemOp_sysenter);
2037/** Opcode 0x0f 0x35. */
2038FNIEMOP_STUB(iemOp_sysexit);
2039/** Opcode 0x0f 0x37. */
2040FNIEMOP_STUB(iemOp_getsec);
2041/** Opcode 0x0f 0x38. */
2042FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2043/** Opcode 0x0f 0x3a. */
2044FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2045
2046
2047/**
2048 * Implements a conditional move.
2049 *
2050 * Wish there was an obvious way to do this where we could share and reduce
2051 * code bloat.
2052 *
2053 * @param a_Cnd The conditional "microcode" operation.
2054 */
2055#define CMOV_X(a_Cnd) \
2056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2058 { \
2059 switch (pVCpu->iem.s.enmEffOpSize) \
2060 { \
2061 case IEMMODE_16BIT: \
2062 IEM_MC_BEGIN(0, 1); \
2063 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2064 a_Cnd { \
2065 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2066 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2067 } IEM_MC_ENDIF(); \
2068 IEM_MC_ADVANCE_RIP(); \
2069 IEM_MC_END(); \
2070 return VINF_SUCCESS; \
2071 \
2072 case IEMMODE_32BIT: \
2073 IEM_MC_BEGIN(0, 1); \
2074 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2075 a_Cnd { \
2076 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2077 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2078 } IEM_MC_ELSE() { \
2079 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2080 } IEM_MC_ENDIF(); \
2081 IEM_MC_ADVANCE_RIP(); \
2082 IEM_MC_END(); \
2083 return VINF_SUCCESS; \
2084 \
2085 case IEMMODE_64BIT: \
2086 IEM_MC_BEGIN(0, 1); \
2087 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2088 a_Cnd { \
2089 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2090 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2091 } IEM_MC_ENDIF(); \
2092 IEM_MC_ADVANCE_RIP(); \
2093 IEM_MC_END(); \
2094 return VINF_SUCCESS; \
2095 \
2096 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2097 } \
2098 } \
2099 else \
2100 { \
2101 switch (pVCpu->iem.s.enmEffOpSize) \
2102 { \
2103 case IEMMODE_16BIT: \
2104 IEM_MC_BEGIN(0, 2); \
2105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2106 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2108 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2109 a_Cnd { \
2110 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2111 } IEM_MC_ENDIF(); \
2112 IEM_MC_ADVANCE_RIP(); \
2113 IEM_MC_END(); \
2114 return VINF_SUCCESS; \
2115 \
2116 case IEMMODE_32BIT: \
2117 IEM_MC_BEGIN(0, 2); \
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2119 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2121 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2122 a_Cnd { \
2123 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2124 } IEM_MC_ELSE() { \
2125 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2126 } IEM_MC_ENDIF(); \
2127 IEM_MC_ADVANCE_RIP(); \
2128 IEM_MC_END(); \
2129 return VINF_SUCCESS; \
2130 \
2131 case IEMMODE_64BIT: \
2132 IEM_MC_BEGIN(0, 2); \
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2134 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2136 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2137 a_Cnd { \
2138 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2139 } IEM_MC_ENDIF(); \
2140 IEM_MC_ADVANCE_RIP(); \
2141 IEM_MC_END(); \
2142 return VINF_SUCCESS; \
2143 \
2144 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2145 } \
2146 } do {} while (0)
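/*
 * Illustration only, not used by the decoder: a plain-C sketch of the 32-bit
 * CMOV_X case above.  It shows why the IEM_MC_ELSE() branch clears the high
 * half of the destination: in 64-bit mode a 32-bit CMOVcc zero-extends the
 * destination register even when the condition is false.  The example name
 * and types are invented here; this is not the IEM microcode.
 */
#if 0
# include <stdint.h>
# include <stdbool.h>
static uint64_t exampleCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCond)
{
    if (fCond)
        return uSrc32;              /* taken: move and zero-extend into the full register */
    return (uint32_t)uDst64;        /* not taken: the old low dword is still zero-extended */
}
#endif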
2147
2148
2149
2150/** Opcode 0x0f 0x40. */
2151FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2152{
2153 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2154 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2155}
2156
2157
2158/** Opcode 0x0f 0x41. */
2159FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2160{
2161 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2162 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2163}
2164
2165
2166/** Opcode 0x0f 0x42. */
2167FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2168{
2169 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2170 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2171}
2172
2173
2174/** Opcode 0x0f 0x43. */
2175FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2176{
2177 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2178 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2179}
2180
2181
2182/** Opcode 0x0f 0x44. */
2183FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2184{
2185 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2186 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2187}
2188
2189
2190/** Opcode 0x0f 0x45. */
2191FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2192{
2193 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2194 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2195}
2196
2197
2198/** Opcode 0x0f 0x46. */
2199FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2200{
2201 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2202 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2203}
2204
2205
2206/** Opcode 0x0f 0x47. */
2207FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2208{
2209 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2210 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2211}
2212
2213
2214/** Opcode 0x0f 0x48. */
2215FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2216{
2217 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2218 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2219}
2220
2221
2222/** Opcode 0x0f 0x49. */
2223FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2224{
2225 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2226 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2227}
2228
2229
2230/** Opcode 0x0f 0x4a. */
2231FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2232{
2233 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2234 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2235}
2236
2237
2238/** Opcode 0x0f 0x4b. */
2239FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2240{
2241 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2242 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2243}
2244
2245
2246/** Opcode 0x0f 0x4c. */
2247FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2248{
2249 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2250 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2251}
2252
2253
2254/** Opcode 0x0f 0x4d. */
2255FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2256{
2257 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2258 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2259}
2260
2261
2262/** Opcode 0x0f 0x4e. */
2263FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2264{
2265 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2266 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2267}
2268
2269
2270/** Opcode 0x0f 0x4f. */
2271FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2272{
2273 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2274 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2275}
2276
2277#undef CMOV_X
2278
2279/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2280FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2281/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2282FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2283/* Opcode 0xf3 0x0f 0x50 - invalid */
2284/* Opcode 0xf2 0x0f 0x50 - invalid */
2285
2286/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2287FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2288/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2289FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2290/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2291FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2292/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2293FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2294
2295/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2296FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2297/* Opcode 0x66 0x0f 0x52 - invalid */
2298/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2299FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2300/* Opcode 0xf2 0x0f 0x52 - invalid */
2301
2302/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2303FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2304/* Opcode 0x66 0x0f 0x53 - invalid */
2305/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2306FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2307/* Opcode 0xf2 0x0f 0x53 - invalid */
2308
2309/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2310FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2311/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2312FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2313/* Opcode 0xf3 0x0f 0x54 - invalid */
2314/* Opcode 0xf2 0x0f 0x54 - invalid */
2315
2316/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2317FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2318/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2319FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2320/* Opcode 0xf3 0x0f 0x55 - invalid */
2321/* Opcode 0xf2 0x0f 0x55 - invalid */
2322
2323/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2324FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2325/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2326FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2327/* Opcode 0xf3 0x0f 0x56 - invalid */
2328/* Opcode 0xf2 0x0f 0x56 - invalid */
2329
2330/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2331FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2332/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2333FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2334/* Opcode 0xf3 0x0f 0x57 - invalid */
2335/* Opcode 0xf2 0x0f 0x57 - invalid */
2336
2337/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2338FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2339/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2340FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2341/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2342FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2343/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2344FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2345
2346/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2347FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2348/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2349FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2350/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2351FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2352/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2353FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2354
2355/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2356FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2357/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2358FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2359/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2360FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2361/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2362FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2363
2364/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2365FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2366/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2367FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2368/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2369FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2370/* Opcode 0xf2 0x0f 0x5b - invalid */
2371
2372/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2373FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2374/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2375FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2376/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2377FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2378/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2379FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2380
2381/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2382FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2383/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2384FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2385/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2386FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2387/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2388FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2389
2390/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2391FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2392/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2393FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2394/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2395FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2396/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2397FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2398
2399/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2400FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2401/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2402FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2403/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2404FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2405/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2406FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2407
2408/**
2409 * Common worker for SSE2 instructions on the forms:
2410 * pxxxx xmm1, xmm2/mem128
2411 *
2412 * The 2nd operand is the first half of a register, which in the memory case
2413 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
2414 * memory access for SSE.
2415 *
2416 * Exceptions type 4.
2417 */
2418FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2419{
2420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2422 {
2423 /*
2424 * Register, register.
2425 */
2426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2427 IEM_MC_BEGIN(2, 0);
2428 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2429 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2431 IEM_MC_PREPARE_SSE_USAGE();
2432 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2433 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2434 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2435 IEM_MC_ADVANCE_RIP();
2436 IEM_MC_END();
2437 }
2438 else
2439 {
2440 /*
2441 * Register, memory.
2442 */
2443 IEM_MC_BEGIN(2, 2);
2444 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2445 IEM_MC_LOCAL(uint64_t, uSrc);
2446 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2452 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2453
2454 IEM_MC_PREPARE_SSE_USAGE();
2455 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2456 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2457
2458 IEM_MC_ADVANCE_RIP();
2459 IEM_MC_END();
2460 }
2461 return VINF_SUCCESS;
2462}
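/*
 * Illustration only: the byte-level effect of the low-half interleave that
 * the SSE worker above dispatches to, shown for punpcklbw.  A minimal C99
 * sketch with plain arrays instead of RTUINT128U; the example name is
 * invented and the arrays are assumed not to overlap.
 */
#if 0
# include <stdint.h>
static void examplePunpcklbw128(uint8_t abDst[16], uint8_t const abSrc[16])
{
    uint8_t abRes[16];
    for (unsigned i = 0; i < 8; i++)
    {
        abRes[2 * i]     = abDst[i];    /* even result bytes come from the destination's low half */
        abRes[2 * i + 1] = abSrc[i];    /* odd result bytes come from the source's low half */
    }
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abRes[i];
}
#endif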
2463
2464
2465/**
2466 * Common worker for MMX instructions on the forms:
2467 * pxxxx mm1, mm2/mem32
2468 *
2469 * The 2nd operand is the first half of a register, which in the memory case
2470 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
2471 * memory access for SSE.
2472 *
2473 * Exceptions type 4.
2474 */
2475FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2476{
2477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2478 if (!pImpl->pfnU64)
2479 return IEMOP_RAISE_INVALID_OPCODE();
2480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2481 {
2482 /*
2483 * Register, register.
2484 */
2485 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2486 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2488 IEM_MC_BEGIN(2, 0);
2489 IEM_MC_ARG(uint64_t *, pDst, 0);
2490 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2491 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2492 IEM_MC_PREPARE_FPU_USAGE();
2493 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2494 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2495 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2496 IEM_MC_ADVANCE_RIP();
2497 IEM_MC_END();
2498 }
2499 else
2500 {
2501 /*
2502 * Register, memory.
2503 */
2504 IEM_MC_BEGIN(2, 2);
2505 IEM_MC_ARG(uint64_t *, pDst, 0);
2506 IEM_MC_LOCAL(uint32_t, uSrc);
2507 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2509
2510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2512 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2513 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2514
2515 IEM_MC_PREPARE_FPU_USAGE();
2516 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2517 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2518
2519 IEM_MC_ADVANCE_RIP();
2520 IEM_MC_END();
2521 }
2522 return VINF_SUCCESS;
2523}
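/*
 * Illustration only: the MMX worker above reads just 32 bits of the source,
 * which for punpckldq gives the following plain-C equivalent (invented name,
 * ordinary integer types instead of MMX state):
 */
#if 0
# include <stdint.h>
static uint64_t examplePunpckldq64(uint64_t uDst, uint32_t uSrc)
{
    /* The low dword of the destination stays put; the 32 bits read from the
       source register or memory operand become the new high dword. */
    return (uDst & UINT64_C(0xffffffff)) | ((uint64_t)uSrc << 32);
}
#endif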
2524
2525
2526/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2527FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2528{
2529 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2530 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2531}
2532
2533/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2534FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2535{
2536 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2537 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2538}
2539
2540/* Opcode 0xf3 0x0f 0x60 - invalid */
2541
2542
2543/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2544FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2545{
2546 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2547 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2548}
2549
2550/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2551FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2552{
2553 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2554 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2555}
2556
2557/* Opcode 0xf3 0x0f 0x61 - invalid */
2558
2559
2560/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2561FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2562{
2563 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2564 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2565}
2566
2567/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2568FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2569{
2570 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2571 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2572}
2573
2574/* Opcode 0xf3 0x0f 0x62 - invalid */
2575
2576
2577
2578/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2579FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2580/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2581FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2582/* Opcode 0xf3 0x0f 0x63 - invalid */
2583
2584/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2585FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2586/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2587FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2588/* Opcode 0xf3 0x0f 0x64 - invalid */
2589
2590/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2591FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2592/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2593FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2594/* Opcode 0xf3 0x0f 0x65 - invalid */
2595
2596/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2597FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2598/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2599FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2600/* Opcode 0xf3 0x0f 0x66 - invalid */
2601
2602/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2603FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2604/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2605FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2606/* Opcode 0xf3 0x0f 0x67 - invalid */
2607
2608
2609/**
2610 * Common worker for MMX instructions on the form:
2611 * pxxxx mm1, mm2/mem64
2612 *
2613 * The 2nd operand is the second half of a register, which in the memory case
2614 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2615 * where it may read the full 128 bits or only the upper 64 bits.
2616 *
2617 * Exceptions type 4.
2618 */
2619FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2620{
2621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2622 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2624 {
2625 /*
2626 * Register, register.
2627 */
2628 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2629 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2631 IEM_MC_BEGIN(2, 0);
2632 IEM_MC_ARG(uint64_t *, pDst, 0);
2633 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2634 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2635 IEM_MC_PREPARE_FPU_USAGE();
2636 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2637 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2638 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2639 IEM_MC_ADVANCE_RIP();
2640 IEM_MC_END();
2641 }
2642 else
2643 {
2644 /*
2645 * Register, memory.
2646 */
2647 IEM_MC_BEGIN(2, 2);
2648 IEM_MC_ARG(uint64_t *, pDst, 0);
2649 IEM_MC_LOCAL(uint64_t, uSrc);
2650 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2655 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2656 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2657
2658 IEM_MC_PREPARE_FPU_USAGE();
2659 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2660 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2661
2662 IEM_MC_ADVANCE_RIP();
2663 IEM_MC_END();
2664 }
2665 return VINF_SUCCESS;
2666}
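/*
 * Illustration only: the high-half interleave implemented via the MMX worker
 * above, shown for punpckhdq as a plain-C sketch (invented name):
 */
#if 0
# include <stdint.h>
static uint64_t examplePunpckhdq64(uint64_t uDst, uint64_t uSrc)
{
    /* Result dword 0 is the destination's high dword, dword 1 the source's. */
    return (uDst >> 32) | (uSrc & UINT64_C(0xffffffff00000000));
}
#endif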
2667
2668
2669/**
2670 * Common worker for SSE2 instructions on the form:
2671 * pxxxx xmm1, xmm2/mem128
2672 *
2673 * The 2nd operand is the second half of a register, which in the memory case
2674 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2675 * where it may read the full 128 bits or only the upper 64 bits.
2676 *
2677 * Exceptions type 4.
2678 */
2679FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2680{
2681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2683 {
2684 /*
2685 * Register, register.
2686 */
2687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2688 IEM_MC_BEGIN(2, 0);
2689 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2690 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2692 IEM_MC_PREPARE_SSE_USAGE();
2693 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2694 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2695 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2696 IEM_MC_ADVANCE_RIP();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /*
2702 * Register, memory.
2703 */
2704 IEM_MC_BEGIN(2, 2);
2705 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2706 IEM_MC_LOCAL(RTUINT128U, uSrc);
2707 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709
2710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2713 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2714
2715 IEM_MC_PREPARE_SSE_USAGE();
2716 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2717 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2718
2719 IEM_MC_ADVANCE_RIP();
2720 IEM_MC_END();
2721 }
2722 return VINF_SUCCESS;
2723}
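/*
 * Illustration only: the 128-bit high-half interleave the SSE worker above
 * feeds, shown for punpckhbw.  Minimal C99 sketch, invented name, arrays
 * assumed not to overlap.
 */
#if 0
# include <stdint.h>
static void examplePunpckhbw128(uint8_t abDst[16], uint8_t const abSrc[16])
{
    uint8_t abRes[16];
    for (unsigned i = 0; i < 8; i++)
    {
        abRes[2 * i]     = abDst[8 + i];    /* even result bytes from the destination's high half */
        abRes[2 * i + 1] = abSrc[8 + i];    /* odd result bytes from the source's high half */
    }
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abRes[i];
}
#endif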
2724
2725
2726/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2727FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2728{
2729 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2730 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2731}
2732
2733/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2734FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2735{
2736 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2737 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2738}
2739/* Opcode 0xf3 0x0f 0x68 - invalid */
2740
2741
2742/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2743FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2744{
2745 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2746 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2747}
2748
2749/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2750FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2751{
2752 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2753 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2754
2755}
2756/* Opcode 0xf3 0x0f 0x69 - invalid */
2757
2758
2759/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2760FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2761{
2762 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2763 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2764}
2765
2766/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2767FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2768{
2769 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2770 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2771}
2772/* Opcode 0xf3 0x0f 0x6a - invalid */
2773
2774
2775/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2776FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2777/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2778FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2779/* Opcode 0xf3 0x0f 0x6b - invalid */
2780
2781
2782/* Opcode 0x0f 0x6c - invalid */
2783
2784/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2785FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2786{
2787 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2788 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2789}
2790
2791/* Opcode 0xf3 0x0f 0x6c - invalid */
2792/* Opcode 0xf2 0x0f 0x6c - invalid */
2793
2794
2795/* Opcode 0x0f 0x6d - invalid */
2796
2797/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2798FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2799{
2800 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2801 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2802}
2803
2804/* Opcode 0xf3 0x0f 0x6d - invalid */
2805
2806
2807/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2808FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2809{
2810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2811 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2812 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2813 else
2814 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2816 {
2817 /* MMX, greg */
2818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2819 IEM_MC_BEGIN(0, 1);
2820 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2821 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2822 IEM_MC_LOCAL(uint64_t, u64Tmp);
2823 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2824 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2825 else
2826 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2827 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2828 IEM_MC_ADVANCE_RIP();
2829 IEM_MC_END();
2830 }
2831 else
2832 {
2833 /* MMX, [mem] */
2834 IEM_MC_BEGIN(0, 2);
2835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2836 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2839 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2840 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2841 {
2842 IEM_MC_LOCAL(uint64_t, u64Tmp);
2843 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2844 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2845 }
2846 else
2847 {
2848 IEM_MC_LOCAL(uint32_t, u32Tmp);
2849 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2850 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2851 }
2852 IEM_MC_ADVANCE_RIP();
2853 IEM_MC_END();
2854 }
2855 return VINF_SUCCESS;
2856}
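/*
 * Illustration only: what the REX.W check above selects between.  With REX.W
 * the full 64-bit GPR is moved (movq); without it only 32 bits are read and
 * zero-extended into the 64-bit MMX register (movd).  Invented name, plain
 * types:
 */
#if 0
# include <stdint.h>
# include <stdbool.h>
static uint64_t exampleMovdMovq(uint64_t uSrc, bool fRexW)
{
    return fRexW ? uSrc : (uint64_t)(uint32_t)uSrc;
}
#endif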
2857
2858/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2859FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2860{
2861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2862 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2863 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
2864 else
2865 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
2866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2867 {
2868 /* XMM, greg*/
2869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2870 IEM_MC_BEGIN(0, 1);
2871 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2872 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2873 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2874 {
2875 IEM_MC_LOCAL(uint64_t, u64Tmp);
2876 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2877 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2878 }
2879 else
2880 {
2881 IEM_MC_LOCAL(uint32_t, u32Tmp);
2882 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2883 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2884 }
2885 IEM_MC_ADVANCE_RIP();
2886 IEM_MC_END();
2887 }
2888 else
2889 {
2890 /* XMM, [mem] */
2891 IEM_MC_BEGIN(0, 2);
2892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2897 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2898 {
2899 IEM_MC_LOCAL(uint64_t, u64Tmp);
2900 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2901 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2902 }
2903 else
2904 {
2905 IEM_MC_LOCAL(uint32_t, u32Tmp);
2906 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2907 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2908 }
2909 IEM_MC_ADVANCE_RIP();
2910 IEM_MC_END();
2911 }
2912 return VINF_SUCCESS;
2913}
2914
2915/* Opcode 0xf3 0x0f 0x6e - invalid */
2916
2917
2918/** Opcode 0x0f 0x6f - movq Pq, Qq */
2919FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2920{
2921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2922 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2924 {
2925 /*
2926 * Register, register.
2927 */
2928 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2929 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2931 IEM_MC_BEGIN(0, 1);
2932 IEM_MC_LOCAL(uint64_t, u64Tmp);
2933 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2934 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2935 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2936 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2937 IEM_MC_ADVANCE_RIP();
2938 IEM_MC_END();
2939 }
2940 else
2941 {
2942 /*
2943 * Register, memory.
2944 */
2945 IEM_MC_BEGIN(0, 2);
2946 IEM_MC_LOCAL(uint64_t, u64Tmp);
2947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2948
2949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2951 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2952 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2953 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2954 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2955
2956 IEM_MC_ADVANCE_RIP();
2957 IEM_MC_END();
2958 }
2959 return VINF_SUCCESS;
2960}
2961
2962/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2963FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2964{
2965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2966 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2967 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2968 {
2969 /*
2970 * Register, register.
2971 */
2972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2973 IEM_MC_BEGIN(0, 0);
2974 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2975 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2976 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2977 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2978 IEM_MC_ADVANCE_RIP();
2979 IEM_MC_END();
2980 }
2981 else
2982 {
2983 /*
2984 * Register, memory.
2985 */
2986 IEM_MC_BEGIN(0, 2);
2987 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2989
2990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2992 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2993 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2994 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2995 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2996
2997 IEM_MC_ADVANCE_RIP();
2998 IEM_MC_END();
2999 }
3000 return VINF_SUCCESS;
3001}
3002
3003/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3004FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3009 {
3010 /*
3011 * Register, register.
3012 */
3013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3014 IEM_MC_BEGIN(0, 0);
3015 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3016 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3017 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3018 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3019 IEM_MC_ADVANCE_RIP();
3020 IEM_MC_END();
3021 }
3022 else
3023 {
3024 /*
3025 * Register, memory.
3026 */
3027 IEM_MC_BEGIN(0, 2);
3028 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3030
3031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3033 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3034 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3035 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3036 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3037
3038 IEM_MC_ADVANCE_RIP();
3039 IEM_MC_END();
3040 }
3041 return VINF_SUCCESS;
3042}
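/*
 * Illustration only: the functional difference between the movdqa and movdqu
 * paths above is just the alignment requirement on the memory operand; the
 * aligned fetch raises #GP(0) on a misaligned address.  Sketch with an
 * invented name:
 */
#if 0
# include <stdint.h>
# include <stdbool.h>
static bool exampleMovdqaAlignmentOk(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0;    /* movdqa requires 16-byte alignment; movdqu does not */
}
#endif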
3043
3044
3045/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3046FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3047{
3048 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3051 {
3052 /*
3053 * Register, register.
3054 */
3055 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3057
3058 IEM_MC_BEGIN(3, 0);
3059 IEM_MC_ARG(uint64_t *, pDst, 0);
3060 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3061 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3062 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3063 IEM_MC_PREPARE_FPU_USAGE();
3064 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3065 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3066 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3067 IEM_MC_ADVANCE_RIP();
3068 IEM_MC_END();
3069 }
3070 else
3071 {
3072 /*
3073 * Register, memory.
3074 */
3075 IEM_MC_BEGIN(3, 2);
3076 IEM_MC_ARG(uint64_t *, pDst, 0);
3077 IEM_MC_LOCAL(uint64_t, uSrc);
3078 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3080
3081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3082 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3083 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3086
3087 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3088 IEM_MC_PREPARE_FPU_USAGE();
3089 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3090 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3091
3092 IEM_MC_ADVANCE_RIP();
3093 IEM_MC_END();
3094 }
3095 return VINF_SUCCESS;
3096}
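/*
 * Illustration only: the word shuffle pshufw performs with the bEvil
 * immediate fetched above; each 2-bit field of the immediate selects the
 * source word for one destination word.  Invented name, arrays assumed not
 * to overlap:
 */
#if 0
# include <stdint.h>
static void examplePshufw(uint16_t awDst[4], uint16_t const awSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        awDst[i] = awSrc[(bImm >> (2 * i)) & 3];
}
#endif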
3097
3098/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3099FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3100{
3101 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3104 {
3105 /*
3106 * Register, register.
3107 */
3108 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3110
3111 IEM_MC_BEGIN(3, 0);
3112 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3113 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3114 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3115 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3116 IEM_MC_PREPARE_SSE_USAGE();
3117 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3118 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3119 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3120 IEM_MC_ADVANCE_RIP();
3121 IEM_MC_END();
3122 }
3123 else
3124 {
3125 /*
3126 * Register, memory.
3127 */
3128 IEM_MC_BEGIN(3, 2);
3129 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3130 IEM_MC_LOCAL(RTUINT128U, uSrc);
3131 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3133
3134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3135 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3136 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3139
3140 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3141 IEM_MC_PREPARE_SSE_USAGE();
3142 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3143 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3144
3145 IEM_MC_ADVANCE_RIP();
3146 IEM_MC_END();
3147 }
3148 return VINF_SUCCESS;
3149}
3150
3151/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3152FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3153{
3154 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3156 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3157 {
3158 /*
3159 * Register, register.
3160 */
3161 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163
3164 IEM_MC_BEGIN(3, 0);
3165 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3166 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3167 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3168 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3169 IEM_MC_PREPARE_SSE_USAGE();
3170 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3171 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3172 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3173 IEM_MC_ADVANCE_RIP();
3174 IEM_MC_END();
3175 }
3176 else
3177 {
3178 /*
3179 * Register, memory.
3180 */
3181 IEM_MC_BEGIN(3, 2);
3182 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3183 IEM_MC_LOCAL(RTUINT128U, uSrc);
3184 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3186
3187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3188 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3189 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3191 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3192
3193 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3194 IEM_MC_PREPARE_SSE_USAGE();
3195 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3196 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3197
3198 IEM_MC_ADVANCE_RIP();
3199 IEM_MC_END();
3200 }
3201 return VINF_SUCCESS;
3202}
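/*
 * Illustration only: unlike pshufd, pshufhw only rearranges the high
 * quadword and copies the low one through unchanged.  Invented name, arrays
 * assumed not to overlap:
 */
#if 0
# include <stdint.h>
static void examplePshufhw(uint16_t awDst[8], uint16_t const awSrc[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        awDst[i] = awSrc[i];                                /* low quadword copied as-is */
    for (unsigned i = 0; i < 4; i++)
        awDst[4 + i] = awSrc[4 + ((bImm >> (2 * i)) & 3)];  /* high words shuffled by the immediate */
}
#endif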
3203
3204/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3205FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3206{
3207 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3210 {
3211 /*
3212 * Register, register.
3213 */
3214 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216
3217 IEM_MC_BEGIN(3, 0);
3218 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3219 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3220 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3221 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3222 IEM_MC_PREPARE_SSE_USAGE();
3223 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3224 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3225 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3226 IEM_MC_ADVANCE_RIP();
3227 IEM_MC_END();
3228 }
3229 else
3230 {
3231 /*
3232 * Register, memory.
3233 */
3234 IEM_MC_BEGIN(3, 2);
3235 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3236 IEM_MC_LOCAL(RTUINT128U, uSrc);
3237 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3239
3240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3241 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3242 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3244 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3245
3246 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3247 IEM_MC_PREPARE_SSE_USAGE();
3248 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3249 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3250
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 }
3254 return VINF_SUCCESS;
3255}
3256
3257
3258/** Opcode 0x0f 0x71 11/2. */
3259FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3260
3261/** Opcode 0x66 0x0f 0x71 11/2. */
3262FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3263
3264/** Opcode 0x0f 0x71 11/4. */
3265FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3266
3267/** Opcode 0x66 0x0f 0x71 11/4. */
3268FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3269
3270/** Opcode 0x0f 0x71 11/6. */
3271FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3272
3273/** Opcode 0x66 0x0f 0x71 11/6. */
3274FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3275
3276
3277/**
3278 * Group 12 jump table for register variant.
3279 */
3280IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3281{
3282 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3283 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3284 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3285 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3286 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3287 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3288 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3289 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3290};
3291AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3292
3293
3294/** Opcode 0x0f 0x71. */
3295FNIEMOP_DEF(iemOp_Grp12)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3299 /* register, register */
3300 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3301 + pVCpu->iem.s.idxPrefix], bRm);
3302 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3303}
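/*
 * Illustration only: the layout contract behind the group jump tables above.
 * Each /r value owns four consecutive slots, one per mandatory prefix (none,
 * 0x66, 0xf3, 0xf2), so the dispatch index is reg * 4 + prefix.  Sketch with
 * invented names; NULL slots stand in for the invalid-opcode entries.
 */
#if 0
# include <stdio.h>
typedef void (*EXAMPLEPFN)(void);
static void exampleUd(void)     { printf("invalid opcode\n"); }
static void examplePsrlw(void)  { printf("psrlw Nq,Ib\n"); }
static void exampleVpsrlw(void) { printf("vpsrlw Hx,Ux,Ib\n"); }
static EXAMPLEPFN const g_apfnExampleGrp12[8 * 4] =
{
    [2 * 4 + 0] = examplePsrlw,     /* /2, no prefix */
    [2 * 4 + 1] = exampleVpsrlw,    /* /2, 0x66 prefix */
};
static void exampleDispatchGrp12(unsigned iReg, unsigned idxPrefix)
{
    EXAMPLEPFN pfn = g_apfnExampleGrp12[(iReg & 7) * 4 + (idxPrefix & 3)];
    (pfn ? pfn : exampleUd)();
}
#endif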
3304
3305
3306/** Opcode 0x0f 0x72 11/2. */
3307FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3308
3309/** Opcode 0x66 0x0f 0x72 11/2. */
3310FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3311
3312/** Opcode 0x0f 0x72 11/4. */
3313FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3314
3315/** Opcode 0x66 0x0f 0x72 11/4. */
3316FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3317
3318/** Opcode 0x0f 0x72 11/6. */
3319FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3320
3321/** Opcode 0x66 0x0f 0x72 11/6. */
3322FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3323
3324
3325/**
3326 * Group 13 jump table for register variant.
3327 */
3328IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3329{
3330 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3331 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3332 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3333 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3334 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3335 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3336 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3337 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3338};
3339AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3340
3341/** Opcode 0x0f 0x72. */
3342FNIEMOP_DEF(iemOp_Grp13)
3343{
3344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3345 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3346 /* register, register */
3347 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3348 + pVCpu->iem.s.idxPrefix], bRm);
3349 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3350}
3351
3352
3353/** Opcode 0x0f 0x73 11/2. */
3354FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3355
3356/** Opcode 0x66 0x0f 0x73 11/2. */
3357FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3358
3359/** Opcode 0x66 0x0f 0x73 11/3. */
3360FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3361
3362/** Opcode 0x0f 0x73 11/6. */
3363FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3364
3365/** Opcode 0x66 0x0f 0x73 11/6. */
3366FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3367
3368/** Opcode 0x66 0x0f 0x73 11/7. */
3369FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3370
3371/**
3372 * Group 14 jump table for register variant.
3373 */
3374IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3375{
3376 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3377 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3378 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3379 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3380 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3381 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3382 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3383 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3384};
3385AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3386
3387
3388/** Opcode 0x0f 0x73. */
3389FNIEMOP_DEF(iemOp_Grp14)
3390{
3391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3392 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3393 /* register, register */
3394 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3395 + pVCpu->iem.s.idxPrefix], bRm);
3396 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3397}
3398
3399
3400/**
3401 * Common worker for MMX instructions on the form:
3402 * pxxx mm1, mm2/mem64
3403 */
3404FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3405{
3406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3407 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3408 {
3409 /*
3410 * Register, register.
3411 */
3412 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3413 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3415 IEM_MC_BEGIN(2, 0);
3416 IEM_MC_ARG(uint64_t *, pDst, 0);
3417 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3418 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3419 IEM_MC_PREPARE_FPU_USAGE();
3420 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3421 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3422 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3423 IEM_MC_ADVANCE_RIP();
3424 IEM_MC_END();
3425 }
3426 else
3427 {
3428 /*
3429 * Register, memory.
3430 */
3431 IEM_MC_BEGIN(2, 2);
3432 IEM_MC_ARG(uint64_t *, pDst, 0);
3433 IEM_MC_LOCAL(uint64_t, uSrc);
3434 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3436
3437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3439 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3440 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3441
3442 IEM_MC_PREPARE_FPU_USAGE();
3443 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3444 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3445
3446 IEM_MC_ADVANCE_RIP();
3447 IEM_MC_END();
3448 }
3449 return VINF_SUCCESS;
3450}
3451
3452
3453/**
3454 * Common worker for SSE2 instructions on the forms:
3455 * pxxx xmm1, xmm2/mem128
3456 *
3457 * Proper alignment of the 128-bit operand is enforced.
3458 * Exceptions type 4. SSE2 cpuid checks.
3459 */
3460FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3461{
3462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3464 {
3465 /*
3466 * Register, register.
3467 */
3468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3469 IEM_MC_BEGIN(2, 0);
3470 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3471 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3473 IEM_MC_PREPARE_SSE_USAGE();
3474 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3475 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3476 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3477 IEM_MC_ADVANCE_RIP();
3478 IEM_MC_END();
3479 }
3480 else
3481 {
3482 /*
3483 * Register, memory.
3484 */
3485 IEM_MC_BEGIN(2, 2);
3486 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3487 IEM_MC_LOCAL(RTUINT128U, uSrc);
3488 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490
3491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3493 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3494 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3495
3496 IEM_MC_PREPARE_SSE_USAGE();
3497 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3498 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3499
3500 IEM_MC_ADVANCE_RIP();
3501 IEM_MC_END();
3502 }
3503 return VINF_SUCCESS;
3504}
3505
3506
3507/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3508FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3509{
3510 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3511 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3512}
3513
3514/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3515FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3516{
3517 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3518 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3519}
3520
3521/* Opcode 0xf3 0x0f 0x74 - invalid */
3522/* Opcode 0xf2 0x0f 0x74 - invalid */
3523
3524
3525/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3526FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3527{
3528 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3529 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3530}
3531
3532/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3533FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3534{
3535 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3536 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3537}
3538
3539/* Opcode 0xf3 0x0f 0x75 - invalid */
3540/* Opcode 0xf2 0x0f 0x75 - invalid */
3541
3542
3543/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3544FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3545{
3546 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3547 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3548}
3549
3550/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3551FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3552{
3553 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3554 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3555}
3556
3557/* Opcode 0xf3 0x0f 0x76 - invalid */
3558/* Opcode 0xf2 0x0f 0x76 - invalid */
3559
3560
3561/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3562FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3563/* Opcode 0x66 0x0f 0x77 - invalid */
3564/* Opcode 0xf3 0x0f 0x77 - invalid */
3565/* Opcode 0xf2 0x0f 0x77 - invalid */
3566
3567/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3568FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3570FNIEMOP_STUB(iemOp_AmdGrp17);
3571/* Opcode 0xf3 0x0f 0x78 - invalid */
3572/* Opcode 0xf2 0x0f 0x78 - invalid */
3573
3574/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3575FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3576/* Opcode 0x66 0x0f 0x79 - invalid */
3577/* Opcode 0xf3 0x0f 0x79 - invalid */
3578/* Opcode 0xf2 0x0f 0x79 - invalid */
3579
3580/* Opcode 0x0f 0x7a - invalid */
3581/* Opcode 0x66 0x0f 0x7a - invalid */
3582/* Opcode 0xf3 0x0f 0x7a - invalid */
3583/* Opcode 0xf2 0x0f 0x7a - invalid */
3584
3585/* Opcode 0x0f 0x7b - invalid */
3586/* Opcode 0x66 0x0f 0x7b - invalid */
3587/* Opcode 0xf3 0x0f 0x7b - invalid */
3588/* Opcode 0xf2 0x0f 0x7b - invalid */
3589
3590/* Opcode 0x0f 0x7c - invalid */
3591/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3592FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3593/* Opcode 0xf3 0x0f 0x7c - invalid */
3594/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3595FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3596
3597/* Opcode 0x0f 0x7d - invalid */
3598/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3599FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3600/* Opcode 0xf3 0x0f 0x7d - invalid */
3601/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3602FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3603
3604
3605/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3606FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3607{
3608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3609 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3610 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3611 else
3612 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3613 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3614 {
3615 /* greg, MMX */
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3617 IEM_MC_BEGIN(0, 1);
3618 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3619 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3620 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3621 {
3622 IEM_MC_LOCAL(uint64_t, u64Tmp);
3623 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3624 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3625 }
3626 else
3627 {
3628 IEM_MC_LOCAL(uint32_t, u32Tmp);
3629 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3630 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3631 }
3632 IEM_MC_ADVANCE_RIP();
3633 IEM_MC_END();
3634 }
3635 else
3636 {
3637 /* [mem], MMX */
3638 IEM_MC_BEGIN(0, 2);
3639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3640 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
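        /* No immediate bytes follow the ModRM byte here, so cbImm is zero below. */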
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3643 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3644 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3645 {
3646 IEM_MC_LOCAL(uint64_t, u64Tmp);
3647 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3648 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3649 }
3650 else
3651 {
3652 IEM_MC_LOCAL(uint32_t, u32Tmp);
3653 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3654 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3655 }
3656 IEM_MC_ADVANCE_RIP();
3657 IEM_MC_END();
3658 }
3659 return VINF_SUCCESS;
3660}
3661
3662/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3663FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3664{
3665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3666 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3667 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3668 else
3669 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3671 {
3672 /* greg, XMM */
3673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674 IEM_MC_BEGIN(0, 1);
3675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3677 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3678 {
3679 IEM_MC_LOCAL(uint64_t, u64Tmp);
3680 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3681 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3682 }
3683 else
3684 {
3685 IEM_MC_LOCAL(uint32_t, u32Tmp);
3686 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3687 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3688 }
3689 IEM_MC_ADVANCE_RIP();
3690 IEM_MC_END();
3691 }
3692 else
3693 {
3694 /* [mem], XMM */
3695 IEM_MC_BEGIN(0, 2);
3696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3697 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3700 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3702 {
3703 IEM_MC_LOCAL(uint64_t, u64Tmp);
3704 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3705 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3706 }
3707 else
3708 {
3709 IEM_MC_LOCAL(uint32_t, u32Tmp);
3710 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3711 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3712 }
3713 IEM_MC_ADVANCE_RIP();
3714 IEM_MC_END();
3715 }
3716 return VINF_SUCCESS;
3717}
3718
3719/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3720FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3721/* Opcode 0xf2 0x0f 0x7e - invalid */
3722
3723
3724/** Opcode 0x0f 0x7f - movq Qq, Pq */
3725FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3726{
3727 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3730 {
3731 /*
3732 * Register, register.
3733 */
3734 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3735 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3737 IEM_MC_BEGIN(0, 1);
3738 IEM_MC_LOCAL(uint64_t, u64Tmp);
3739 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3740 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3741 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3742 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3743 IEM_MC_ADVANCE_RIP();
3744 IEM_MC_END();
3745 }
3746 else
3747 {
3748 /*
3749 * Register, memory.
3750 */
3751 IEM_MC_BEGIN(0, 2);
3752 IEM_MC_LOCAL(uint64_t, u64Tmp);
3753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3754
3755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3757 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3758 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3759
3760 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3761 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3762
3763 IEM_MC_ADVANCE_RIP();
3764 IEM_MC_END();
3765 }
3766 return VINF_SUCCESS;
3767}
3768
3769/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3770FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3771{
3772 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3774 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3775 {
3776 /*
3777 * Register, register.
3778 */
3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3780 IEM_MC_BEGIN(0, 0);
3781 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3782 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3783 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3784 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3785 IEM_MC_ADVANCE_RIP();
3786 IEM_MC_END();
3787 }
3788 else
3789 {
3790 /*
3791 * Register, memory.
3792 */
3793 IEM_MC_BEGIN(0, 2);
3794 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3796
3797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3800 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3801
3802 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3803 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3804
3805 IEM_MC_ADVANCE_RIP();
3806 IEM_MC_END();
3807 }
3808 return VINF_SUCCESS;
3809}
3810
3811/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3812FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3813{
    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3817 {
3818 /*
3819 * Register, register.
3820 */
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822 IEM_MC_BEGIN(0, 0);
3823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3824 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3825 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3826 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3827 IEM_MC_ADVANCE_RIP();
3828 IEM_MC_END();
3829 }
3830 else
3831 {
3832 /*
3833 * Register, memory.
3834 */
3835 IEM_MC_BEGIN(0, 2);
3836 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3838
3839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3841 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3842 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3843
3844 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
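        /* Note: unlike vmovdqa above, the store is not alignment checked. */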
3845 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3846
3847 IEM_MC_ADVANCE_RIP();
3848 IEM_MC_END();
3849 }
3850 return VINF_SUCCESS;
3851}
3852
3853/* Opcode 0xf2 0x0f 0x7f - invalid */
3854
3855
3856
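/*
 * The opcodes 0x0f 0x80 thru 0x0f 0x8f are the long forms of the Jcc
 * instructions.  Each body below tests the relevant EFLAGS condition and
 * either takes the relative jump (16-bit or 32-bit displacement, depending
 * on the effective operand size) or advances RIP past the instruction.
 */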
3857/** Opcode 0x0f 0x80. */
3858FNIEMOP_DEF(iemOp_jo_Jv)
3859{
3860 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3861 IEMOP_HLP_MIN_386();
3862 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3863 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3864 {
3865 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3867
3868 IEM_MC_BEGIN(0, 0);
3869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3870 IEM_MC_REL_JMP_S16(i16Imm);
3871 } IEM_MC_ELSE() {
3872 IEM_MC_ADVANCE_RIP();
3873 } IEM_MC_ENDIF();
3874 IEM_MC_END();
3875 }
3876 else
3877 {
3878 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3880
3881 IEM_MC_BEGIN(0, 0);
3882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3883 IEM_MC_REL_JMP_S32(i32Imm);
3884 } IEM_MC_ELSE() {
3885 IEM_MC_ADVANCE_RIP();
3886 } IEM_MC_ENDIF();
3887 IEM_MC_END();
3888 }
3889 return VINF_SUCCESS;
3890}
3891
3892
3893/** Opcode 0x0f 0x81. */
3894FNIEMOP_DEF(iemOp_jno_Jv)
3895{
3896 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3897 IEMOP_HLP_MIN_386();
3898 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3899 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3900 {
3901 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3903
3904 IEM_MC_BEGIN(0, 0);
3905 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3906 IEM_MC_ADVANCE_RIP();
3907 } IEM_MC_ELSE() {
3908 IEM_MC_REL_JMP_S16(i16Imm);
3909 } IEM_MC_ENDIF();
3910 IEM_MC_END();
3911 }
3912 else
3913 {
3914 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3916
3917 IEM_MC_BEGIN(0, 0);
3918 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3919 IEM_MC_ADVANCE_RIP();
3920 } IEM_MC_ELSE() {
3921 IEM_MC_REL_JMP_S32(i32Imm);
3922 } IEM_MC_ENDIF();
3923 IEM_MC_END();
3924 }
3925 return VINF_SUCCESS;
3926}
3927
3928
3929/** Opcode 0x0f 0x82. */
3930FNIEMOP_DEF(iemOp_jc_Jv)
3931{
3932 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3933 IEMOP_HLP_MIN_386();
3934 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3935 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3936 {
3937 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3939
3940 IEM_MC_BEGIN(0, 0);
3941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3942 IEM_MC_REL_JMP_S16(i16Imm);
3943 } IEM_MC_ELSE() {
3944 IEM_MC_ADVANCE_RIP();
3945 } IEM_MC_ENDIF();
3946 IEM_MC_END();
3947 }
3948 else
3949 {
3950 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3952
3953 IEM_MC_BEGIN(0, 0);
3954 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3955 IEM_MC_REL_JMP_S32(i32Imm);
3956 } IEM_MC_ELSE() {
3957 IEM_MC_ADVANCE_RIP();
3958 } IEM_MC_ENDIF();
3959 IEM_MC_END();
3960 }
3961 return VINF_SUCCESS;
3962}
3963
3964
3965/** Opcode 0x0f 0x83. */
3966FNIEMOP_DEF(iemOp_jnc_Jv)
3967{
3968 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3969 IEMOP_HLP_MIN_386();
3970 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3971 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3972 {
3973 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3975
3976 IEM_MC_BEGIN(0, 0);
3977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3978 IEM_MC_ADVANCE_RIP();
3979 } IEM_MC_ELSE() {
3980 IEM_MC_REL_JMP_S16(i16Imm);
3981 } IEM_MC_ENDIF();
3982 IEM_MC_END();
3983 }
3984 else
3985 {
3986 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3988
3989 IEM_MC_BEGIN(0, 0);
3990 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3991 IEM_MC_ADVANCE_RIP();
3992 } IEM_MC_ELSE() {
3993 IEM_MC_REL_JMP_S32(i32Imm);
3994 } IEM_MC_ENDIF();
3995 IEM_MC_END();
3996 }
3997 return VINF_SUCCESS;
3998}
3999
4000
4001/** Opcode 0x0f 0x84. */
4002FNIEMOP_DEF(iemOp_je_Jv)
4003{
4004 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4005 IEMOP_HLP_MIN_386();
4006 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4007 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4008 {
4009 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4011
4012 IEM_MC_BEGIN(0, 0);
4013 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4014 IEM_MC_REL_JMP_S16(i16Imm);
4015 } IEM_MC_ELSE() {
4016 IEM_MC_ADVANCE_RIP();
4017 } IEM_MC_ENDIF();
4018 IEM_MC_END();
4019 }
4020 else
4021 {
4022 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4024
4025 IEM_MC_BEGIN(0, 0);
4026 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4027 IEM_MC_REL_JMP_S32(i32Imm);
4028 } IEM_MC_ELSE() {
4029 IEM_MC_ADVANCE_RIP();
4030 } IEM_MC_ENDIF();
4031 IEM_MC_END();
4032 }
4033 return VINF_SUCCESS;
4034}
4035
4036
4037/** Opcode 0x0f 0x85. */
4038FNIEMOP_DEF(iemOp_jne_Jv)
4039{
4040 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4041 IEMOP_HLP_MIN_386();
4042 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4043 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4044 {
4045 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4047
4048 IEM_MC_BEGIN(0, 0);
4049 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4050 IEM_MC_ADVANCE_RIP();
4051 } IEM_MC_ELSE() {
4052 IEM_MC_REL_JMP_S16(i16Imm);
4053 } IEM_MC_ENDIF();
4054 IEM_MC_END();
4055 }
4056 else
4057 {
4058 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4060
4061 IEM_MC_BEGIN(0, 0);
4062 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4063 IEM_MC_ADVANCE_RIP();
4064 } IEM_MC_ELSE() {
4065 IEM_MC_REL_JMP_S32(i32Imm);
4066 } IEM_MC_ENDIF();
4067 IEM_MC_END();
4068 }
4069 return VINF_SUCCESS;
4070}
4071
4072
4073/** Opcode 0x0f 0x86. */
4074FNIEMOP_DEF(iemOp_jbe_Jv)
4075{
4076 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4077 IEMOP_HLP_MIN_386();
4078 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4079 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4080 {
4081 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4083
4084 IEM_MC_BEGIN(0, 0);
4085 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4086 IEM_MC_REL_JMP_S16(i16Imm);
4087 } IEM_MC_ELSE() {
4088 IEM_MC_ADVANCE_RIP();
4089 } IEM_MC_ENDIF();
4090 IEM_MC_END();
4091 }
4092 else
4093 {
4094 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4096
4097 IEM_MC_BEGIN(0, 0);
4098 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4099 IEM_MC_REL_JMP_S32(i32Imm);
4100 } IEM_MC_ELSE() {
4101 IEM_MC_ADVANCE_RIP();
4102 } IEM_MC_ENDIF();
4103 IEM_MC_END();
4104 }
4105 return VINF_SUCCESS;
4106}
4107
4108
4109/** Opcode 0x0f 0x87. */
4110FNIEMOP_DEF(iemOp_jnbe_Jv)
4111{
4112 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4113 IEMOP_HLP_MIN_386();
4114 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4115 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4116 {
4117 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4119
4120 IEM_MC_BEGIN(0, 0);
4121 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4122 IEM_MC_ADVANCE_RIP();
4123 } IEM_MC_ELSE() {
4124 IEM_MC_REL_JMP_S16(i16Imm);
4125 } IEM_MC_ENDIF();
4126 IEM_MC_END();
4127 }
4128 else
4129 {
4130 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4132
4133 IEM_MC_BEGIN(0, 0);
4134 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4135 IEM_MC_ADVANCE_RIP();
4136 } IEM_MC_ELSE() {
4137 IEM_MC_REL_JMP_S32(i32Imm);
4138 } IEM_MC_ENDIF();
4139 IEM_MC_END();
4140 }
4141 return VINF_SUCCESS;
4142}
4143
4144
4145/** Opcode 0x0f 0x88. */
4146FNIEMOP_DEF(iemOp_js_Jv)
4147{
4148 IEMOP_MNEMONIC(js_Jv, "js Jv");
4149 IEMOP_HLP_MIN_386();
4150 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4151 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4152 {
4153 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4155
4156 IEM_MC_BEGIN(0, 0);
4157 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4158 IEM_MC_REL_JMP_S16(i16Imm);
4159 } IEM_MC_ELSE() {
4160 IEM_MC_ADVANCE_RIP();
4161 } IEM_MC_ENDIF();
4162 IEM_MC_END();
4163 }
4164 else
4165 {
4166 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168
4169 IEM_MC_BEGIN(0, 0);
4170 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4171 IEM_MC_REL_JMP_S32(i32Imm);
4172 } IEM_MC_ELSE() {
4173 IEM_MC_ADVANCE_RIP();
4174 } IEM_MC_ENDIF();
4175 IEM_MC_END();
4176 }
4177 return VINF_SUCCESS;
4178}
4179
4180
4181/** Opcode 0x0f 0x89. */
4182FNIEMOP_DEF(iemOp_jns_Jv)
4183{
4184 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4185 IEMOP_HLP_MIN_386();
4186 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4187 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4188 {
4189 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191
4192 IEM_MC_BEGIN(0, 0);
4193 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4194 IEM_MC_ADVANCE_RIP();
4195 } IEM_MC_ELSE() {
4196 IEM_MC_REL_JMP_S16(i16Imm);
4197 } IEM_MC_ENDIF();
4198 IEM_MC_END();
4199 }
4200 else
4201 {
4202 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204
4205 IEM_MC_BEGIN(0, 0);
4206 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4207 IEM_MC_ADVANCE_RIP();
4208 } IEM_MC_ELSE() {
4209 IEM_MC_REL_JMP_S32(i32Imm);
4210 } IEM_MC_ENDIF();
4211 IEM_MC_END();
4212 }
4213 return VINF_SUCCESS;
4214}
4215
4216
4217/** Opcode 0x0f 0x8a. */
4218FNIEMOP_DEF(iemOp_jp_Jv)
4219{
4220 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4221 IEMOP_HLP_MIN_386();
4222 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4223 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4224 {
4225 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4227
4228 IEM_MC_BEGIN(0, 0);
4229 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4230 IEM_MC_REL_JMP_S16(i16Imm);
4231 } IEM_MC_ELSE() {
4232 IEM_MC_ADVANCE_RIP();
4233 } IEM_MC_ENDIF();
4234 IEM_MC_END();
4235 }
4236 else
4237 {
4238 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4240
4241 IEM_MC_BEGIN(0, 0);
4242 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4243 IEM_MC_REL_JMP_S32(i32Imm);
4244 } IEM_MC_ELSE() {
4245 IEM_MC_ADVANCE_RIP();
4246 } IEM_MC_ENDIF();
4247 IEM_MC_END();
4248 }
4249 return VINF_SUCCESS;
4250}
4251
4252
4253/** Opcode 0x0f 0x8b. */
4254FNIEMOP_DEF(iemOp_jnp_Jv)
4255{
4256 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4257 IEMOP_HLP_MIN_386();
4258 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4259 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4260 {
4261 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4263
4264 IEM_MC_BEGIN(0, 0);
4265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4266 IEM_MC_ADVANCE_RIP();
4267 } IEM_MC_ELSE() {
4268 IEM_MC_REL_JMP_S16(i16Imm);
4269 } IEM_MC_ENDIF();
4270 IEM_MC_END();
4271 }
4272 else
4273 {
4274 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4276
4277 IEM_MC_BEGIN(0, 0);
4278 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4279 IEM_MC_ADVANCE_RIP();
4280 } IEM_MC_ELSE() {
4281 IEM_MC_REL_JMP_S32(i32Imm);
4282 } IEM_MC_ENDIF();
4283 IEM_MC_END();
4284 }
4285 return VINF_SUCCESS;
4286}
4287
4288
4289/** Opcode 0x0f 0x8c. */
4290FNIEMOP_DEF(iemOp_jl_Jv)
4291{
4292 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4293 IEMOP_HLP_MIN_386();
4294 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4295 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4296 {
4297 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4299
4300 IEM_MC_BEGIN(0, 0);
4301 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4302 IEM_MC_REL_JMP_S16(i16Imm);
4303 } IEM_MC_ELSE() {
4304 IEM_MC_ADVANCE_RIP();
4305 } IEM_MC_ENDIF();
4306 IEM_MC_END();
4307 }
4308 else
4309 {
4310 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4312
4313 IEM_MC_BEGIN(0, 0);
4314 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4315 IEM_MC_REL_JMP_S32(i32Imm);
4316 } IEM_MC_ELSE() {
4317 IEM_MC_ADVANCE_RIP();
4318 } IEM_MC_ENDIF();
4319 IEM_MC_END();
4320 }
4321 return VINF_SUCCESS;
4322}
4323
4324
4325/** Opcode 0x0f 0x8d. */
4326FNIEMOP_DEF(iemOp_jnl_Jv)
4327{
4328 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4329 IEMOP_HLP_MIN_386();
4330 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4331 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4332 {
4333 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4335
4336 IEM_MC_BEGIN(0, 0);
4337 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4338 IEM_MC_ADVANCE_RIP();
4339 } IEM_MC_ELSE() {
4340 IEM_MC_REL_JMP_S16(i16Imm);
4341 } IEM_MC_ENDIF();
4342 IEM_MC_END();
4343 }
4344 else
4345 {
4346 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4348
4349 IEM_MC_BEGIN(0, 0);
4350 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4351 IEM_MC_ADVANCE_RIP();
4352 } IEM_MC_ELSE() {
4353 IEM_MC_REL_JMP_S32(i32Imm);
4354 } IEM_MC_ENDIF();
4355 IEM_MC_END();
4356 }
4357 return VINF_SUCCESS;
4358}
4359
4360
4361/** Opcode 0x0f 0x8e. */
4362FNIEMOP_DEF(iemOp_jle_Jv)
4363{
4364 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4365 IEMOP_HLP_MIN_386();
4366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4367 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4368 {
4369 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4371
4372 IEM_MC_BEGIN(0, 0);
4373 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4374 IEM_MC_REL_JMP_S16(i16Imm);
4375 } IEM_MC_ELSE() {
4376 IEM_MC_ADVANCE_RIP();
4377 } IEM_MC_ENDIF();
4378 IEM_MC_END();
4379 }
4380 else
4381 {
4382 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4384
4385 IEM_MC_BEGIN(0, 0);
4386 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4387 IEM_MC_REL_JMP_S32(i32Imm);
4388 } IEM_MC_ELSE() {
4389 IEM_MC_ADVANCE_RIP();
4390 } IEM_MC_ENDIF();
4391 IEM_MC_END();
4392 }
4393 return VINF_SUCCESS;
4394}
4395
4396
4397/** Opcode 0x0f 0x8f. */
4398FNIEMOP_DEF(iemOp_jnle_Jv)
4399{
4400 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4401 IEMOP_HLP_MIN_386();
4402 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4403 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4404 {
4405 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4407
4408 IEM_MC_BEGIN(0, 0);
4409 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4410 IEM_MC_ADVANCE_RIP();
4411 } IEM_MC_ELSE() {
4412 IEM_MC_REL_JMP_S16(i16Imm);
4413 } IEM_MC_ENDIF();
4414 IEM_MC_END();
4415 }
4416 else
4417 {
4418 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4420
4421 IEM_MC_BEGIN(0, 0);
4422 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4423 IEM_MC_ADVANCE_RIP();
4424 } IEM_MC_ELSE() {
4425 IEM_MC_REL_JMP_S32(i32Imm);
4426 } IEM_MC_ENDIF();
4427 IEM_MC_END();
4428 }
4429 return VINF_SUCCESS;
4430}
4431
4432
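/*
 * The opcodes 0x0f 0x90 thru 0x0f 0x9f are the SETcc instructions: they
 * evaluate the same EFLAGS conditions as the Jcc forms above and store a
 * single byte, 1 if the condition is met and 0 if it isn't, to the
 * byte-sized register or memory operand.
 */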
4433/** Opcode 0x0f 0x90. */
4434FNIEMOP_DEF(iemOp_seto_Eb)
4435{
4436 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4437 IEMOP_HLP_MIN_386();
4438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4439
4440 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4441 * any way. AMD says it's "unused", whatever that means. We're
4442 * ignoring for now. */
4443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4444 {
4445 /* register target */
4446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4447 IEM_MC_BEGIN(0, 0);
4448 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4449 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4450 } IEM_MC_ELSE() {
4451 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4452 } IEM_MC_ENDIF();
4453 IEM_MC_ADVANCE_RIP();
4454 IEM_MC_END();
4455 }
4456 else
4457 {
4458 /* memory target */
4459 IEM_MC_BEGIN(0, 1);
4460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4463 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4464 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4465 } IEM_MC_ELSE() {
4466 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4467 } IEM_MC_ENDIF();
4468 IEM_MC_ADVANCE_RIP();
4469 IEM_MC_END();
4470 }
4471 return VINF_SUCCESS;
4472}
4473
4474
4475/** Opcode 0x0f 0x91. */
4476FNIEMOP_DEF(iemOp_setno_Eb)
4477{
4478 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4479 IEMOP_HLP_MIN_386();
4480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4481
4482 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4483 * any way. AMD says it's "unused", whatever that means. We're
4484 * ignoring for now. */
4485 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4486 {
4487 /* register target */
4488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4489 IEM_MC_BEGIN(0, 0);
4490 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4491 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4492 } IEM_MC_ELSE() {
4493 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4494 } IEM_MC_ENDIF();
4495 IEM_MC_ADVANCE_RIP();
4496 IEM_MC_END();
4497 }
4498 else
4499 {
4500 /* memory target */
4501 IEM_MC_BEGIN(0, 1);
4502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4506 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4507 } IEM_MC_ELSE() {
4508 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4509 } IEM_MC_ENDIF();
4510 IEM_MC_ADVANCE_RIP();
4511 IEM_MC_END();
4512 }
4513 return VINF_SUCCESS;
4514}
4515
4516
4517/** Opcode 0x0f 0x92. */
4518FNIEMOP_DEF(iemOp_setc_Eb)
4519{
4520 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4521 IEMOP_HLP_MIN_386();
4522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4523
4524 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4525 * any way. AMD says it's "unused", whatever that means. We're
4526 * ignoring for now. */
4527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4528 {
4529 /* register target */
4530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4531 IEM_MC_BEGIN(0, 0);
4532 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4533 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4534 } IEM_MC_ELSE() {
4535 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4536 } IEM_MC_ENDIF();
4537 IEM_MC_ADVANCE_RIP();
4538 IEM_MC_END();
4539 }
4540 else
4541 {
4542 /* memory target */
4543 IEM_MC_BEGIN(0, 1);
4544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4547 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4548 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4549 } IEM_MC_ELSE() {
4550 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4551 } IEM_MC_ENDIF();
4552 IEM_MC_ADVANCE_RIP();
4553 IEM_MC_END();
4554 }
4555 return VINF_SUCCESS;
4556}
4557
4558
4559/** Opcode 0x0f 0x93. */
4560FNIEMOP_DEF(iemOp_setnc_Eb)
4561{
4562 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4563 IEMOP_HLP_MIN_386();
4564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4565
4566 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4567 * any way. AMD says it's "unused", whatever that means. We're
4568 * ignoring for now. */
4569 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4570 {
4571 /* register target */
4572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4573 IEM_MC_BEGIN(0, 0);
4574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4575 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4576 } IEM_MC_ELSE() {
4577 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4578 } IEM_MC_ENDIF();
4579 IEM_MC_ADVANCE_RIP();
4580 IEM_MC_END();
4581 }
4582 else
4583 {
4584 /* memory target */
4585 IEM_MC_BEGIN(0, 1);
4586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4589 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4590 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4591 } IEM_MC_ELSE() {
4592 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4593 } IEM_MC_ENDIF();
4594 IEM_MC_ADVANCE_RIP();
4595 IEM_MC_END();
4596 }
4597 return VINF_SUCCESS;
4598}
4599
4600
4601/** Opcode 0x0f 0x94. */
4602FNIEMOP_DEF(iemOp_sete_Eb)
4603{
4604 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4605 IEMOP_HLP_MIN_386();
4606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4607
4608 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4609 * any way. AMD says it's "unused", whatever that means. We're
4610 * ignoring for now. */
4611 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4612 {
4613 /* register target */
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615 IEM_MC_BEGIN(0, 0);
4616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4617 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4618 } IEM_MC_ELSE() {
4619 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4620 } IEM_MC_ENDIF();
4621 IEM_MC_ADVANCE_RIP();
4622 IEM_MC_END();
4623 }
4624 else
4625 {
4626 /* memory target */
4627 IEM_MC_BEGIN(0, 1);
4628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4631 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4632 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4633 } IEM_MC_ELSE() {
4634 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4635 } IEM_MC_ENDIF();
4636 IEM_MC_ADVANCE_RIP();
4637 IEM_MC_END();
4638 }
4639 return VINF_SUCCESS;
4640}
4641
4642
4643/** Opcode 0x0f 0x95. */
4644FNIEMOP_DEF(iemOp_setne_Eb)
4645{
4646 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4647 IEMOP_HLP_MIN_386();
4648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4649
4650 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4651 * any way. AMD says it's "unused", whatever that means. We're
4652 * ignoring for now. */
4653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4654 {
4655 /* register target */
4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4657 IEM_MC_BEGIN(0, 0);
4658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4659 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4660 } IEM_MC_ELSE() {
4661 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4662 } IEM_MC_ENDIF();
4663 IEM_MC_ADVANCE_RIP();
4664 IEM_MC_END();
4665 }
4666 else
4667 {
4668 /* memory target */
4669 IEM_MC_BEGIN(0, 1);
4670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4674 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4675 } IEM_MC_ELSE() {
4676 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4677 } IEM_MC_ENDIF();
4678 IEM_MC_ADVANCE_RIP();
4679 IEM_MC_END();
4680 }
4681 return VINF_SUCCESS;
4682}
4683
4684
4685/** Opcode 0x0f 0x96. */
4686FNIEMOP_DEF(iemOp_setbe_Eb)
4687{
4688 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4689 IEMOP_HLP_MIN_386();
4690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4691
4692 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4693 * any way. AMD says it's "unused", whatever that means. We're
4694 * ignoring for now. */
4695 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4696 {
4697 /* register target */
4698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4699 IEM_MC_BEGIN(0, 0);
4700 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4701 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4702 } IEM_MC_ELSE() {
4703 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4704 } IEM_MC_ENDIF();
4705 IEM_MC_ADVANCE_RIP();
4706 IEM_MC_END();
4707 }
4708 else
4709 {
4710 /* memory target */
4711 IEM_MC_BEGIN(0, 1);
4712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4716 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4717 } IEM_MC_ELSE() {
4718 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4719 } IEM_MC_ENDIF();
4720 IEM_MC_ADVANCE_RIP();
4721 IEM_MC_END();
4722 }
4723 return VINF_SUCCESS;
4724}
4725
4726
4727/** Opcode 0x0f 0x97. */
4728FNIEMOP_DEF(iemOp_setnbe_Eb)
4729{
4730 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4731 IEMOP_HLP_MIN_386();
4732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4733
4734 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4735 * any way. AMD says it's "unused", whatever that means. We're
4736 * ignoring for now. */
4737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4738 {
4739 /* register target */
4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4741 IEM_MC_BEGIN(0, 0);
4742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4743 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4744 } IEM_MC_ELSE() {
4745 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4746 } IEM_MC_ENDIF();
4747 IEM_MC_ADVANCE_RIP();
4748 IEM_MC_END();
4749 }
4750 else
4751 {
4752 /* memory target */
4753 IEM_MC_BEGIN(0, 1);
4754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4757 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4758 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4759 } IEM_MC_ELSE() {
4760 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4761 } IEM_MC_ENDIF();
4762 IEM_MC_ADVANCE_RIP();
4763 IEM_MC_END();
4764 }
4765 return VINF_SUCCESS;
4766}
4767
4768
4769/** Opcode 0x0f 0x98. */
4770FNIEMOP_DEF(iemOp_sets_Eb)
4771{
4772 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4773 IEMOP_HLP_MIN_386();
4774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4775
4776 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4777 * any way. AMD says it's "unused", whatever that means. We're
4778 * ignoring for now. */
4779 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4780 {
4781 /* register target */
4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4783 IEM_MC_BEGIN(0, 0);
4784 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4785 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4786 } IEM_MC_ELSE() {
4787 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4788 } IEM_MC_ENDIF();
4789 IEM_MC_ADVANCE_RIP();
4790 IEM_MC_END();
4791 }
4792 else
4793 {
4794 /* memory target */
4795 IEM_MC_BEGIN(0, 1);
4796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4799 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4800 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4801 } IEM_MC_ELSE() {
4802 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4803 } IEM_MC_ENDIF();
4804 IEM_MC_ADVANCE_RIP();
4805 IEM_MC_END();
4806 }
4807 return VINF_SUCCESS;
4808}
4809
4810
4811/** Opcode 0x0f 0x99. */
4812FNIEMOP_DEF(iemOp_setns_Eb)
4813{
4814 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4815 IEMOP_HLP_MIN_386();
4816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4817
4818 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4819 * any way. AMD says it's "unused", whatever that means. We're
4820 * ignoring for now. */
4821 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4822 {
4823 /* register target */
4824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4825 IEM_MC_BEGIN(0, 0);
4826 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4827 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4828 } IEM_MC_ELSE() {
4829 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4830 } IEM_MC_ENDIF();
4831 IEM_MC_ADVANCE_RIP();
4832 IEM_MC_END();
4833 }
4834 else
4835 {
4836 /* memory target */
4837 IEM_MC_BEGIN(0, 1);
4838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4841 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4842 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4843 } IEM_MC_ELSE() {
4844 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4845 } IEM_MC_ENDIF();
4846 IEM_MC_ADVANCE_RIP();
4847 IEM_MC_END();
4848 }
4849 return VINF_SUCCESS;
4850}
4851
4852
4853/** Opcode 0x0f 0x9a. */
4854FNIEMOP_DEF(iemOp_setp_Eb)
4855{
4856 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4857 IEMOP_HLP_MIN_386();
4858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4859
4860 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4861 * any way. AMD says it's "unused", whatever that means. We're
4862 * ignoring for now. */
4863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4864 {
4865 /* register target */
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867 IEM_MC_BEGIN(0, 0);
4868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4869 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4870 } IEM_MC_ELSE() {
4871 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4872 } IEM_MC_ENDIF();
4873 IEM_MC_ADVANCE_RIP();
4874 IEM_MC_END();
4875 }
4876 else
4877 {
4878 /* memory target */
4879 IEM_MC_BEGIN(0, 1);
4880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4884 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4885 } IEM_MC_ELSE() {
4886 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4887 } IEM_MC_ENDIF();
4888 IEM_MC_ADVANCE_RIP();
4889 IEM_MC_END();
4890 }
4891 return VINF_SUCCESS;
4892}
4893
4894
4895/** Opcode 0x0f 0x9b. */
4896FNIEMOP_DEF(iemOp_setnp_Eb)
4897{
4898 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4899 IEMOP_HLP_MIN_386();
4900 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4901
4902 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4903 * any way. AMD says it's "unused", whatever that means. We're
4904 * ignoring for now. */
4905 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4906 {
4907 /* register target */
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909 IEM_MC_BEGIN(0, 0);
4910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4911 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4912 } IEM_MC_ELSE() {
4913 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4914 } IEM_MC_ENDIF();
4915 IEM_MC_ADVANCE_RIP();
4916 IEM_MC_END();
4917 }
4918 else
4919 {
4920 /* memory target */
4921 IEM_MC_BEGIN(0, 1);
4922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4925 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4926 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4927 } IEM_MC_ELSE() {
4928 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4929 } IEM_MC_ENDIF();
4930 IEM_MC_ADVANCE_RIP();
4931 IEM_MC_END();
4932 }
4933 return VINF_SUCCESS;
4934}
4935
4936
4937/** Opcode 0x0f 0x9c. */
4938FNIEMOP_DEF(iemOp_setl_Eb)
4939{
4940 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4941 IEMOP_HLP_MIN_386();
4942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4943
4944 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4945 * any way. AMD says it's "unused", whatever that means. We're
4946 * ignoring for now. */
4947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4948 {
4949 /* register target */
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4951 IEM_MC_BEGIN(0, 0);
4952 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4953 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4954 } IEM_MC_ELSE() {
4955 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4956 } IEM_MC_ENDIF();
4957 IEM_MC_ADVANCE_RIP();
4958 IEM_MC_END();
4959 }
4960 else
4961 {
4962 /* memory target */
4963 IEM_MC_BEGIN(0, 1);
4964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4968 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4969 } IEM_MC_ELSE() {
4970 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 } IEM_MC_ENDIF();
4972 IEM_MC_ADVANCE_RIP();
4973 IEM_MC_END();
4974 }
4975 return VINF_SUCCESS;
4976}
4977
4978
4979/** Opcode 0x0f 0x9d. */
4980FNIEMOP_DEF(iemOp_setnl_Eb)
4981{
4982 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4983 IEMOP_HLP_MIN_386();
4984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4985
4986 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4987 * any way. AMD says it's "unused", whatever that means. We're
4988 * ignoring for now. */
4989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4990 {
4991 /* register target */
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993 IEM_MC_BEGIN(0, 0);
4994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4995 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4996 } IEM_MC_ELSE() {
4997 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4998 } IEM_MC_ENDIF();
4999 IEM_MC_ADVANCE_RIP();
5000 IEM_MC_END();
5001 }
5002 else
5003 {
5004 /* memory target */
5005 IEM_MC_BEGIN(0, 1);
5006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5009 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5010 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5013 } IEM_MC_ENDIF();
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 }
5017 return VINF_SUCCESS;
5018}
5019
5020
5021/** Opcode 0x0f 0x9e. */
5022FNIEMOP_DEF(iemOp_setle_Eb)
5023{
5024 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5025 IEMOP_HLP_MIN_386();
5026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5027
5028 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5029 * any way. AMD says it's "unused", whatever that means. We're
5030 * ignoring for now. */
5031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5032 {
5033 /* register target */
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_BEGIN(0, 0);
5036 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5037 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5038 } IEM_MC_ELSE() {
5039 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5040 } IEM_MC_ENDIF();
5041 IEM_MC_ADVANCE_RIP();
5042 IEM_MC_END();
5043 }
5044 else
5045 {
5046 /* memory target */
5047 IEM_MC_BEGIN(0, 1);
5048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5051 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5052 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5053 } IEM_MC_ELSE() {
5054 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5055 } IEM_MC_ENDIF();
5056 IEM_MC_ADVANCE_RIP();
5057 IEM_MC_END();
5058 }
5059 return VINF_SUCCESS;
5060}
5061
5062
5063/** Opcode 0x0f 0x9f. */
5064FNIEMOP_DEF(iemOp_setnle_Eb)
5065{
5066 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5067 IEMOP_HLP_MIN_386();
5068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5069
5070 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5071 * any way. AMD says it's "unused", whatever that means. We're
5072 * ignoring for now. */
5073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5074 {
5075 /* register target */
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077 IEM_MC_BEGIN(0, 0);
5078 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5079 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5080 } IEM_MC_ELSE() {
5081 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_ADVANCE_RIP();
5084 IEM_MC_END();
5085 }
5086 else
5087 {
5088 /* memory target */
5089 IEM_MC_BEGIN(0, 1);
5090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5094 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5095 } IEM_MC_ELSE() {
5096 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5097 } IEM_MC_ENDIF();
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/**
5106 * Common 'push segment-register' helper.
5107 */
5108FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5109{
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
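    /* Only fs and gs can be pushed in 64-bit mode; the one-byte push es/cs/ss/ds encodings are invalid there. */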
5112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5113
5114 switch (pVCpu->iem.s.enmEffOpSize)
5115 {
5116 case IEMMODE_16BIT:
5117 IEM_MC_BEGIN(0, 1);
5118 IEM_MC_LOCAL(uint16_t, u16Value);
5119 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5120 IEM_MC_PUSH_U16(u16Value);
5121 IEM_MC_ADVANCE_RIP();
5122 IEM_MC_END();
5123 break;
5124
5125 case IEMMODE_32BIT:
5126 IEM_MC_BEGIN(0, 1);
5127 IEM_MC_LOCAL(uint32_t, u32Value);
5128 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
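            /* A dedicated push worker is used because real CPUs may write only
               the low word of the stack slot on a 32-bit segment register push. */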
5129 IEM_MC_PUSH_U32_SREG(u32Value);
5130 IEM_MC_ADVANCE_RIP();
5131 IEM_MC_END();
5132 break;
5133
5134 case IEMMODE_64BIT:
5135 IEM_MC_BEGIN(0, 1);
5136 IEM_MC_LOCAL(uint64_t, u64Value);
5137 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5138 IEM_MC_PUSH_U64(u64Value);
5139 IEM_MC_ADVANCE_RIP();
5140 IEM_MC_END();
5141 break;
5142 }
5143
5144 return VINF_SUCCESS;
5145}
5146
5147
5148/** Opcode 0x0f 0xa0. */
5149FNIEMOP_DEF(iemOp_push_fs)
5150{
5151 IEMOP_MNEMONIC(push_fs, "push fs");
5152 IEMOP_HLP_MIN_386();
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5155}
5156
5157
5158/** Opcode 0x0f 0xa1. */
5159FNIEMOP_DEF(iemOp_pop_fs)
5160{
5161 IEMOP_MNEMONIC(pop_fs, "pop fs");
5162 IEMOP_HLP_MIN_386();
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
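    /* Loading a segment register can fault and reloads hidden state, so defer to the C implementation. */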
5164 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5165}
5166
5167
5168/** Opcode 0x0f 0xa2. */
5169FNIEMOP_DEF(iemOp_cpuid)
5170{
5171 IEMOP_MNEMONIC(cpuid, "cpuid");
5172 IEMOP_HLP_MIN_486(); /* not all 486es. */
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5175}
5176
5177
5178/**
5179 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5180 * iemOp_bts_Ev_Gv.
5181 */
5182FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5183{
5184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
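    /* Tell the verifier which flags the bit instructions leave undefined. */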
5185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5186
5187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5188 {
5189 /* register destination. */
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 switch (pVCpu->iem.s.enmEffOpSize)
5192 {
5193 case IEMMODE_16BIT:
5194 IEM_MC_BEGIN(3, 0);
5195 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5196 IEM_MC_ARG(uint16_t, u16Src, 1);
5197 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5198
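                /* For register destinations the bit offset wraps modulo the operand width, hence the AND below. */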
5199 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5200 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5201 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5202 IEM_MC_REF_EFLAGS(pEFlags);
5203 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5204
5205 IEM_MC_ADVANCE_RIP();
5206 IEM_MC_END();
5207 return VINF_SUCCESS;
5208
5209 case IEMMODE_32BIT:
5210 IEM_MC_BEGIN(3, 0);
5211 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5212 IEM_MC_ARG(uint32_t, u32Src, 1);
5213 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5214
5215 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5216 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5217 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5218 IEM_MC_REF_EFLAGS(pEFlags);
5219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5220
5221 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5222 IEM_MC_ADVANCE_RIP();
5223 IEM_MC_END();
5224 return VINF_SUCCESS;
5225
5226 case IEMMODE_64BIT:
5227 IEM_MC_BEGIN(3, 0);
5228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5229 IEM_MC_ARG(uint64_t, u64Src, 1);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5234 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 /* memory destination. */
5248
5249 uint32_t fAccess;
5250 if (pImpl->pfnLockedU16)
5251 fAccess = IEM_ACCESS_DATA_RW;
5252 else /* BT */
5253 fAccess = IEM_ACCESS_DATA_R;
5254
5255 /** @todo test negative bit offsets! */
5256 switch (pVCpu->iem.s.enmEffOpSize)
5257 {
5258 case IEMMODE_16BIT:
5259 IEM_MC_BEGIN(3, 2);
5260 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5261 IEM_MC_ARG(uint16_t, u16Src, 1);
5262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5264 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5265
5266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5267 if (pImpl->pfnLockedU16)
5268 IEMOP_HLP_DONE_DECODING();
5269 else
5270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5271 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
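                    /* Split the signed bit offset into a word index that adjusts the
                       effective address by (offset >> 4) * 2 bytes and a bit number
                       within that word, offset & 15. */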
5272 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5273 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5274 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5275 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5276 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5277 IEM_MC_FETCH_EFLAGS(EFlags);
5278
5279 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5280 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5281 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5282 else
5283 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5285
5286 IEM_MC_COMMIT_EFLAGS(EFlags);
5287 IEM_MC_ADVANCE_RIP();
5288 IEM_MC_END();
5289 return VINF_SUCCESS;
5290
5291 case IEMMODE_32BIT:
5292 IEM_MC_BEGIN(3, 2);
5293 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5294 IEM_MC_ARG(uint32_t, u32Src, 1);
5295 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5297 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5298
5299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5300 if (pImpl->pfnLockedU16)
5301 IEMOP_HLP_DONE_DECODING();
5302 else
5303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5304 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5305 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5306 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5307 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5308 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5309 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5310 IEM_MC_FETCH_EFLAGS(EFlags);
5311
5312 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5313 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5315 else
5316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5317 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5318
5319 IEM_MC_COMMIT_EFLAGS(EFlags);
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 return VINF_SUCCESS;
5323
5324 case IEMMODE_64BIT:
5325 IEM_MC_BEGIN(3, 2);
5326 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5327 IEM_MC_ARG(uint64_t, u64Src, 1);
5328 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5330 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5331
5332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5333 if (pImpl->pfnLockedU16)
5334 IEMOP_HLP_DONE_DECODING();
5335 else
5336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5337 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5338 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5339 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5340 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5341 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5342 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5343 IEM_MC_FETCH_EFLAGS(EFlags);
5344
5345 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5347 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5348 else
5349 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5351
5352 IEM_MC_COMMIT_EFLAGS(EFlags);
5353 IEM_MC_ADVANCE_RIP();
5354 IEM_MC_END();
5355 return VINF_SUCCESS;
5356
5357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5358 }
5359 }
5360}
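
/*
 * Note on the address adjustment in the memory path above: for the Ev,Gv bit
 * operations the bit offset in the source register is a signed quantity that
 * may select a bit outside the operand addressed by the ModR/M byte.  The
 * SAR/SHL pair turns the offset into a signed operand-sized byte displacement
 * (offset >> 4 words, times two bytes, in the 16-bit case) while the AND
 * keeps only the in-operand bit index.  The pfnLockedU16 test doubles as a
 * "has locked variants" check: it is NULL for BT, which only reads its
 * destination and rejects the LOCK prefix.  A minimal sketch of the 16-bit
 * offset split follows; the helper name is made up for illustration and the
 * code is not built (it assumes two's complement arithmetic right shifts,
 * matching IEM_MC_SAR_LOCAL_S16).
 */
#if 0 /* illustrative sketch, not part of the emulator */
static void iemSketchBtSplitBitOffset16(int16_t i16BitOffset, int16_t *pi16ByteDisp, uint16_t *pu16BitIdx)
{
    *pu16BitIdx   = (uint16_t)i16BitOffset & 0x0f;       /* IEM_MC_AND_ARG_U16(u16Src, 0x0f) */
    *pi16ByteDisp = (int16_t)((i16BitOffset >> 4) * 2);  /* the MC code uses SAR 4 + SHL 1 */
}
#endif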
5361
5362
5363/** Opcode 0x0f 0xa3. */
5364FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5365{
5366 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5367 IEMOP_HLP_MIN_386();
5368 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5369}
5370
5371
5372/**
5373 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5374 */
5375FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5376{
5377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5378 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5379
5380 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5381 {
5382 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5384
5385 switch (pVCpu->iem.s.enmEffOpSize)
5386 {
5387 case IEMMODE_16BIT:
5388 IEM_MC_BEGIN(4, 0);
5389 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5390 IEM_MC_ARG(uint16_t, u16Src, 1);
5391 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5392 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5393
5394 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5395 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5396 IEM_MC_REF_EFLAGS(pEFlags);
5397 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5398
5399 IEM_MC_ADVANCE_RIP();
5400 IEM_MC_END();
5401 return VINF_SUCCESS;
5402
5403 case IEMMODE_32BIT:
5404 IEM_MC_BEGIN(4, 0);
5405 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5406 IEM_MC_ARG(uint32_t, u32Src, 1);
5407 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5408 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5409
5410 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5411 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5412 IEM_MC_REF_EFLAGS(pEFlags);
5413 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5414
5415 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5416 IEM_MC_ADVANCE_RIP();
5417 IEM_MC_END();
5418 return VINF_SUCCESS;
5419
5420 case IEMMODE_64BIT:
5421 IEM_MC_BEGIN(4, 0);
5422 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5423 IEM_MC_ARG(uint64_t, u64Src, 1);
5424 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5425 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5426
5427 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5428 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5429 IEM_MC_REF_EFLAGS(pEFlags);
5430 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5431
5432 IEM_MC_ADVANCE_RIP();
5433 IEM_MC_END();
5434 return VINF_SUCCESS;
5435
5436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5437 }
5438 }
5439 else
5440 {
5441 switch (pVCpu->iem.s.enmEffOpSize)
5442 {
5443 case IEMMODE_16BIT:
5444 IEM_MC_BEGIN(4, 2);
5445 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5446 IEM_MC_ARG(uint16_t, u16Src, 1);
5447 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5450
5451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5452 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5453 IEM_MC_ASSIGN(cShiftArg, cShift);
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5456 IEM_MC_FETCH_EFLAGS(EFlags);
5457 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5458 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5459
5460 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5461 IEM_MC_COMMIT_EFLAGS(EFlags);
5462 IEM_MC_ADVANCE_RIP();
5463 IEM_MC_END();
5464 return VINF_SUCCESS;
5465
5466 case IEMMODE_32BIT:
5467 IEM_MC_BEGIN(4, 2);
5468 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5469 IEM_MC_ARG(uint32_t, u32Src, 1);
5470 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5471 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5473
5474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5475 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5476 IEM_MC_ASSIGN(cShiftArg, cShift);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5479 IEM_MC_FETCH_EFLAGS(EFlags);
5480 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5481 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5482
5483 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5484 IEM_MC_COMMIT_EFLAGS(EFlags);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488
5489 case IEMMODE_64BIT:
5490 IEM_MC_BEGIN(4, 2);
5491 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5492 IEM_MC_ARG(uint64_t, u64Src, 1);
5493 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5496
5497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5498 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5499 IEM_MC_ASSIGN(cShiftArg, cShift);
5500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5501 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5502 IEM_MC_FETCH_EFLAGS(EFlags);
5503 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5504 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5505
5506 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5507 IEM_MC_COMMIT_EFLAGS(EFlags);
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 return VINF_SUCCESS;
5511
5512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5513 }
5514 }
5515}
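
/*
 * Note: the AF/OF declaration above matches the manuals, which leave both
 * undefined for SHLD/SHRD (OF is only defined for 1-bit shifts).  The shift
 * count is presumably masked modulo the operand width by the assembly
 * workers, mirroring hardware.  A minimal sketch of the 32-bit SHLD
 * operation under those assumptions (hypothetical helper, not the
 * emulator's implementation):
 */
#if 0 /* illustrative sketch, not part of the emulator */
static uint32_t iemSketchShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                                       /* count masked modulo operand width */
    if (!cShift)
        return uDst;                                    /* zero count: value (and flags) unchanged */
    return (uDst << cShift) | (uSrc >> (32 - cShift));  /* filled from the top bits of the source */
}
#endif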
5516
5517
5518/**
5519 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5520 */
5521FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5522{
5523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5524 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5525
5526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5527 {
5528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5529
5530 switch (pVCpu->iem.s.enmEffOpSize)
5531 {
5532 case IEMMODE_16BIT:
5533 IEM_MC_BEGIN(4, 0);
5534 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5535 IEM_MC_ARG(uint16_t, u16Src, 1);
5536 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5537 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5538
5539 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5540 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5541 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5542 IEM_MC_REF_EFLAGS(pEFlags);
5543 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5544
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 return VINF_SUCCESS;
5548
5549 case IEMMODE_32BIT:
5550 IEM_MC_BEGIN(4, 0);
5551 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5552 IEM_MC_ARG(uint32_t, u32Src, 1);
5553 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5554 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5555
5556 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5557 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5558 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5559 IEM_MC_REF_EFLAGS(pEFlags);
5560 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5561
5562 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5563 IEM_MC_ADVANCE_RIP();
5564 IEM_MC_END();
5565 return VINF_SUCCESS;
5566
5567 case IEMMODE_64BIT:
5568 IEM_MC_BEGIN(4, 0);
5569 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5570 IEM_MC_ARG(uint64_t, u64Src, 1);
5571 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5572 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5573
5574 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5575 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5576 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5577 IEM_MC_REF_EFLAGS(pEFlags);
5578 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5579
5580 IEM_MC_ADVANCE_RIP();
5581 IEM_MC_END();
5582 return VINF_SUCCESS;
5583
5584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5585 }
5586 }
5587 else
5588 {
5589 switch (pVCpu->iem.s.enmEffOpSize)
5590 {
5591 case IEMMODE_16BIT:
5592 IEM_MC_BEGIN(4, 2);
5593 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5594 IEM_MC_ARG(uint16_t, u16Src, 1);
5595 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5596 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5598
5599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5601 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5602 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5603 IEM_MC_FETCH_EFLAGS(EFlags);
5604 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5605 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5606
5607 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5608 IEM_MC_COMMIT_EFLAGS(EFlags);
5609 IEM_MC_ADVANCE_RIP();
5610 IEM_MC_END();
5611 return VINF_SUCCESS;
5612
5613 case IEMMODE_32BIT:
5614 IEM_MC_BEGIN(4, 2);
5615 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5616 IEM_MC_ARG(uint32_t, u32Src, 1);
5617 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5618 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5619 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5620
5621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5623 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5624 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5625 IEM_MC_FETCH_EFLAGS(EFlags);
5626 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5627 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5628
5629 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5630 IEM_MC_COMMIT_EFLAGS(EFlags);
5631 IEM_MC_ADVANCE_RIP();
5632 IEM_MC_END();
5633 return VINF_SUCCESS;
5634
5635 case IEMMODE_64BIT:
5636 IEM_MC_BEGIN(4, 2);
5637 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5638 IEM_MC_ARG(uint64_t, u64Src, 1);
5639 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5640 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5642
5643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5645 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5646 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5647 IEM_MC_FETCH_EFLAGS(EFlags);
5648 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5649 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5650
5651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5652 IEM_MC_COMMIT_EFLAGS(EFlags);
5653 IEM_MC_ADVANCE_RIP();
5654 IEM_MC_END();
5655 return VINF_SUCCESS;
5656
5657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5658 }
5659 }
5660}
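
/*
 * The CL-count worker above differs from the Ib worker only in where the
 * shift count comes from: it is fetched from CL at execution time, so there
 * is no trailing immediate byte and IEM_MC_CALC_RM_EFF_ADDR is called with
 * an immediate size of zero (the Ib worker passes 1, presumably so
 * RIP-relative addressing in 64-bit mode accounts for the imm8 that follows
 * the ModR/M bytes).
 */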
5661
5662
5664/** Opcode 0x0f 0xa4. */
5665FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5666{
5667 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5668 IEMOP_HLP_MIN_386();
5669 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5670}
5671
5672
5673/** Opcode 0x0f 0xa5. */
5674FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5675{
5676 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5677 IEMOP_HLP_MIN_386();
5678 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5679}
5680
5681
5682/** Opcode 0x0f 0xa8. */
5683FNIEMOP_DEF(iemOp_push_gs)
5684{
5685 IEMOP_MNEMONIC(push_gs, "push gs");
5686 IEMOP_HLP_MIN_386();
5687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5688 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5689}
5690
5691
5692/** Opcode 0x0f 0xa9. */
5693FNIEMOP_DEF(iemOp_pop_gs)
5694{
5695 IEMOP_MNEMONIC(pop_gs, "pop gs");
5696 IEMOP_HLP_MIN_386();
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5699}
5700
5701
5702/** Opcode 0x0f 0xaa. */
5703FNIEMOP_STUB(iemOp_rsm);
5704//IEMOP_HLP_MIN_386();
5705
5706
5707/** Opcode 0x0f 0xab. */
5708FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5709{
5710 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5711 IEMOP_HLP_MIN_386();
5712 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5713}
5714
5715
5716/** Opcode 0x0f 0xac. */
5717FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5718{
5719 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5720 IEMOP_HLP_MIN_386();
5721 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5722}
5723
5724
5725/** Opcode 0x0f 0xad. */
5726FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5727{
5728 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5729 IEMOP_HLP_MIN_386();
5730 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5731}
5732
5733
5734/** Opcode 0x0f 0xae mem/0. */
5735FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5736{
5737 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5738 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5739 return IEMOP_RAISE_INVALID_OPCODE();
5740
5741 IEM_MC_BEGIN(3, 1);
5742 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5743 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5744 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5747 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5748 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5749 IEM_MC_END();
5750 return VINF_SUCCESS;
5751}
5752
5753
5754/** Opcode 0x0f 0xae mem/1. */
5755FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5756{
5757 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5758 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5759 return IEMOP_RAISE_INVALID_OPCODE();
5760
5761 IEM_MC_BEGIN(3, 1);
5762 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5763 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5764 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5767 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5768 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5769 IEM_MC_END();
5770 return VINF_SUCCESS;
5771}
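
/*
 * Both FXSAVE and FXRSTOR above are gated on the guest's CPUID FXSR feature
 * and defer to C workers operating on the 512-byte m512 state image;
 * alignment checking of the memory operand (the image must be 16-byte
 * aligned) is presumably done in those workers rather than here in the
 * decoder.
 */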
5772
5773
5774/** Opcode 0x0f 0xae mem/2. */
5775FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5776
5777/** Opcode 0x0f 0xae mem/3. */
5778FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5779
5780/** Opcode 0x0f 0xae mem/4. */
5781FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5782
5783/** Opcode 0x0f 0xae mem/5. */
5784FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5785
5786/** Opcode 0x0f 0xae mem/6. */
5787FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5788
5789/** Opcode 0x0f 0xae mem/7. */
5790FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5791
5792
5793/** Opcode 0x0f 0xae 11b/5. */
5794FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5795{
5796 RT_NOREF_PV(bRm);
5797 IEMOP_MNEMONIC(lfence, "lfence");
5798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5799 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5800 return IEMOP_RAISE_INVALID_OPCODE();
5801
5802 IEM_MC_BEGIN(0, 0);
5803 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5804 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5805 else
5806 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 return VINF_SUCCESS;
5810}
5811
5812
5813/** Opcode 0x0f 0xae 11b/6. */
5814FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5815{
5816 RT_NOREF_PV(bRm);
5817 IEMOP_MNEMONIC(mfence, "mfence");
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5820 return IEMOP_RAISE_INVALID_OPCODE();
5821
5822 IEM_MC_BEGIN(0, 0);
5823 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5824 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5825 else
5826 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5827 IEM_MC_ADVANCE_RIP();
5828 IEM_MC_END();
5829 return VINF_SUCCESS;
5830}
5831
5832
5833/** Opcode 0x0f 0xae 11b/7. */
5834FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5835{
5836 RT_NOREF_PV(bRm);
5837 IEMOP_MNEMONIC(sfence, "sfence");
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5840 return IEMOP_RAISE_INVALID_OPCODE();
5841
5842 IEM_MC_BEGIN(0, 0);
5843 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5844 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5845 else
5846 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5847 IEM_MC_ADVANCE_RIP();
5848 IEM_MC_END();
5849 return VINF_SUCCESS;
5850}
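
/*
 * All three fence decoders above gate on guest SSE2, although the manuals
 * introduce SFENCE with SSE and only LFENCE/MFENCE with SSE2.  When the host
 * CPU lacks SSE2 the iemAImpl_alt_mem_fence fallback is used, which
 * presumably approximates a full fence with a locked memory operation.
 */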
5851
5852
5853/** Opcode 0xf3 0x0f 0xae 11b/0. */
5854FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5855
5856/** Opcode 0xf3 0x0f 0xae 11b/1. */
5857FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5858
5859/** Opcode 0xf3 0x0f 0xae 11b/2. */
5860FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5861
5862/** Opcode 0xf3 0x0f 0xae 11b/3. */
5863FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5864
5865
5866/** Opcode 0x0f 0xae. */
5867FNIEMOP_DEF(iemOp_Grp15)
5868{
5869/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
5870 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5872 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5873 {
5874 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5875 {
5876 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5877 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5878 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5879 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5880 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5881 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5882 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5883 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5885 }
5886 }
5887 else
5888 {
5889 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5890 {
5891 case 0:
5892 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5893 {
5894 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5895 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5896 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5897 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5898 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5899 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5900 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5901 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5903 }
5904 break;
5905
5906 case IEM_OP_PRF_REPZ:
5907 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5908 {
5909 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5910 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5911 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5912 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5913 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5914 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5915 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5916 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5918 }
5919 break;
5920
5921 default:
5922 return IEMOP_RAISE_INVALID_OPCODE();
5923 }
5924 }
5925}
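
/*
 * Grp15 (0x0f 0xae) decode summary, as implemented above:
 *   mod != 3:              /0 fxsave, /1 fxrstor, /2 ldmxcsr, /3 stmxcsr,
 *                          /4 xsave,  /5 xrstor,  /6 xsaveopt, /7 clflush
 *   mod == 3, no prefix:   /5 lfence, /6 mfence, /7 sfence; /0../4 => #UD
 *   mod == 3, repz (0xf3): /0 rdfsbase, /1 rdgsbase, /2 wrfsbase, /3 wrgsbase; /4../7 => #UD
 *   any other prefix:      #UD
 */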
5926
5927
5928/** Opcode 0x0f 0xaf. */
5929FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5930{
5931 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5932 IEMOP_HLP_MIN_386();
5933 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5934 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5935}
5936
5937
5938/** Opcode 0x0f 0xb0. */
5939FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5940{
5941 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5942 IEMOP_HLP_MIN_486();
5943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5944
5945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5946 {
5947 IEMOP_HLP_DONE_DECODING();
5948 IEM_MC_BEGIN(4, 0);
5949 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5950 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5951 IEM_MC_ARG(uint8_t, u8Src, 2);
5952 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5953
5954 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5955 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5956 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5957 IEM_MC_REF_EFLAGS(pEFlags);
5958 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5959 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5960 else
5961 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5962
5963 IEM_MC_ADVANCE_RIP();
5964 IEM_MC_END();
5965 }
5966 else
5967 {
5968 IEM_MC_BEGIN(4, 3);
5969 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5970 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5971 IEM_MC_ARG(uint8_t, u8Src, 2);
5972 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5974 IEM_MC_LOCAL(uint8_t, u8Al);
5975
5976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5977 IEMOP_HLP_DONE_DECODING();
5978 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5979 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5980 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5981 IEM_MC_FETCH_EFLAGS(EFlags);
5982 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5983 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5984 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5985 else
5986 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5987
5988 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5989 IEM_MC_COMMIT_EFLAGS(EFlags);
5990 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 }
5994 return VINF_SUCCESS;
5995}
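
/*
 * Note why the memory operand is mapped IEM_ACCESS_DATA_RW above even though
 * the compare may fail: CMPXCHG architecturally writes its destination on
 * every execution (with the unchanged old value on a mismatch), which is
 * also what makes the LOCK form meaningful.  A minimal sketch of the u8
 * semantics under that reading (hypothetical helper; flag computation is
 * reduced to ZF here, while the real workers compute the full CMP flag set):
 */
#if 0 /* illustrative sketch, not part of the emulator */
static void iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    uint8_t const u8Old = *pu8Dst;
    if (*pu8Al == u8Old)
    {
        *pfEFlags |= X86_EFL_ZF;
        *pu8Dst    = u8Src;     /* equal: store the source */
    }
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        *pu8Dst    = u8Old;     /* not equal: destination is still written back */
        *pu8Al     = u8Old;     /* AL receives the old destination value */
    }
}
#endif
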
5996
5997/** Opcode 0x0f 0xb1. */
5998FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5999{
6000 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6001 IEMOP_HLP_MIN_486();
6002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6003
6004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6005 {
6006 IEMOP_HLP_DONE_DECODING();
6007 switch (pVCpu->iem.s.enmEffOpSize)
6008 {
6009 case IEMMODE_16BIT:
6010 IEM_MC_BEGIN(4, 0);
6011 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6012 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6013 IEM_MC_ARG(uint16_t, u16Src, 2);
6014 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6015
6016 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6017 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6018 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6019 IEM_MC_REF_EFLAGS(pEFlags);
6020 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6021 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6022 else
6023 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6024
6025 IEM_MC_ADVANCE_RIP();
6026 IEM_MC_END();
6027 return VINF_SUCCESS;
6028
6029 case IEMMODE_32BIT:
6030 IEM_MC_BEGIN(4, 0);
6031 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6032 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6033 IEM_MC_ARG(uint32_t, u32Src, 2);
6034 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6035
6036 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6037 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6038 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6039 IEM_MC_REF_EFLAGS(pEFlags);
6040 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6041 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6042 else
6043 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6044
6045 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6046 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6047 IEM_MC_ADVANCE_RIP();
6048 IEM_MC_END();
6049 return VINF_SUCCESS;
6050
6051 case IEMMODE_64BIT:
6052 IEM_MC_BEGIN(4, 0);
6053 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6054 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6055#ifdef RT_ARCH_X86
6056 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6057#else
6058 IEM_MC_ARG(uint64_t, u64Src, 2);
6059#endif
6060 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6061
6062 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6063 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6064 IEM_MC_REF_EFLAGS(pEFlags);
6065#ifdef RT_ARCH_X86
6066 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6067 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6068 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6069 else
6070 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6071#else
6072 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6073 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6074 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6075 else
6076 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6077#endif
6078
6079 IEM_MC_ADVANCE_RIP();
6080 IEM_MC_END();
6081 return VINF_SUCCESS;
6082
6083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6084 }
6085 }
6086 else
6087 {
6088 switch (pVCpu->iem.s.enmEffOpSize)
6089 {
6090 case IEMMODE_16BIT:
6091 IEM_MC_BEGIN(4, 3);
6092 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6093 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6094 IEM_MC_ARG(uint16_t, u16Src, 2);
6095 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6097 IEM_MC_LOCAL(uint16_t, u16Ax);
6098
6099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6100 IEMOP_HLP_DONE_DECODING();
6101 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6102 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6103 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6104 IEM_MC_FETCH_EFLAGS(EFlags);
6105 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6106 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6107 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6108 else
6109 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6110
6111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6112 IEM_MC_COMMIT_EFLAGS(EFlags);
6113 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 return VINF_SUCCESS;
6117
6118 case IEMMODE_32BIT:
6119 IEM_MC_BEGIN(4, 3);
6120 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6121 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6122 IEM_MC_ARG(uint32_t, u32Src, 2);
6123 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6125 IEM_MC_LOCAL(uint32_t, u32Eax);
6126
6127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6128 IEMOP_HLP_DONE_DECODING();
6129 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6130 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6131 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6132 IEM_MC_FETCH_EFLAGS(EFlags);
6133 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6136 else
6137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6138
6139 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6140 IEM_MC_COMMIT_EFLAGS(EFlags);
6141 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6142 IEM_MC_ADVANCE_RIP();
6143 IEM_MC_END();
6144 return VINF_SUCCESS;
6145
6146 case IEMMODE_64BIT:
6147 IEM_MC_BEGIN(4, 3);
6148 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6149 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6150#ifdef RT_ARCH_X86
6151 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6152#else
6153 IEM_MC_ARG(uint64_t, u64Src, 2);
6154#endif
6155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6157 IEM_MC_LOCAL(uint64_t, u64Rax);
6158
6159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6160 IEMOP_HLP_DONE_DECODING();
6161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6162 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6163 IEM_MC_FETCH_EFLAGS(EFlags);
6164 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6165#ifdef RT_ARCH_X86
6166 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6168 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6169 else
6170 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6171#else
6172 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6175 else
6176 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6177#endif
6178
6179 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6180 IEM_MC_COMMIT_EFLAGS(EFlags);
6181 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6182 IEM_MC_ADVANCE_RIP();
6183 IEM_MC_END();
6184 return VINF_SUCCESS;
6185
6186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6187 }
6188 }
6189}
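
/*
 * The RT_ARCH_X86 special casing above: on a 32-bit host the 64-bit source
 * operand is passed to the cmpxchg worker by reference instead of by value,
 * presumably because the assembly worker's calling convention there cannot
 * take a 64-bit argument in registers.
 */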
6190
6191
6192FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6193{
6194 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6195 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6196
6197 switch (pVCpu->iem.s.enmEffOpSize)
6198 {
6199 case IEMMODE_16BIT:
6200 IEM_MC_BEGIN(5, 1);
6201 IEM_MC_ARG(uint16_t, uSel, 0);
6202 IEM_MC_ARG(uint16_t, offSeg, 1);
6203 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6204 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6205 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6206 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6209 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6210 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6211 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6212 IEM_MC_END();
6213 return VINF_SUCCESS;
6214
6215 case IEMMODE_32BIT:
6216 IEM_MC_BEGIN(5, 1);
6217 IEM_MC_ARG(uint16_t, uSel, 0);
6218 IEM_MC_ARG(uint32_t, offSeg, 1);
6219 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6220 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6221 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6222 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6226 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6227 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6228 IEM_MC_END();
6229 return VINF_SUCCESS;
6230
6231 case IEMMODE_64BIT:
6232 IEM_MC_BEGIN(5, 1);
6233 IEM_MC_ARG(uint16_t, uSel, 0);
6234 IEM_MC_ARG(uint64_t, offSeg, 1);
6235 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6236 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6237 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6238 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6241 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6242 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6243 else
6244 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6245 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6246 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6247 IEM_MC_END();
6248 return VINF_SUCCESS;
6249
6250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6251 }
6252}
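
/*
 * Far pointer layout assumed by the common worker above: the offset part
 * (2, 4 or 8 bytes depending on the effective operand size) comes first at
 * the effective address, with the 16-bit selector immediately after it --
 * hence the IEM_MC_FETCH_MEM_U16_DISP calls with displacements 2, 4 and 8.
 */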
6253
6254
6255/** Opcode 0x0f 0xb2. */
6256FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6257{
6258 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6259 IEMOP_HLP_MIN_386();
6260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6262 return IEMOP_RAISE_INVALID_OPCODE();
6263 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6264}
6265
6266
6267/** Opcode 0x0f 0xb3. */
6268FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6269{
6270 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6271 IEMOP_HLP_MIN_386();
6272 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6273}
6274
6275
6276/** Opcode 0x0f 0xb4. */
6277FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6278{
6279 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6280 IEMOP_HLP_MIN_386();
6281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6282 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6283 return IEMOP_RAISE_INVALID_OPCODE();
6284 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6285}
6286
6287
6288/** Opcode 0x0f 0xb5. */
6289FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6290{
6291 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6292 IEMOP_HLP_MIN_386();
6293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6294 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6295 return IEMOP_RAISE_INVALID_OPCODE();
6296 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6297}
6298
6299
6300/** Opcode 0x0f 0xb6. */
6301FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6302{
6303 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6304 IEMOP_HLP_MIN_386();
6305
6306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6307
6308 /*
6309 * If rm is denoting a register, no more instruction bytes.
6310 */
6311 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6312 {
6313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6314 switch (pVCpu->iem.s.enmEffOpSize)
6315 {
6316 case IEMMODE_16BIT:
6317 IEM_MC_BEGIN(0, 1);
6318 IEM_MC_LOCAL(uint16_t, u16Value);
6319 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6320 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6321 IEM_MC_ADVANCE_RIP();
6322 IEM_MC_END();
6323 return VINF_SUCCESS;
6324
6325 case IEMMODE_32BIT:
6326 IEM_MC_BEGIN(0, 1);
6327 IEM_MC_LOCAL(uint32_t, u32Value);
6328 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6329 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6330 IEM_MC_ADVANCE_RIP();
6331 IEM_MC_END();
6332 return VINF_SUCCESS;
6333
6334 case IEMMODE_64BIT:
6335 IEM_MC_BEGIN(0, 1);
6336 IEM_MC_LOCAL(uint64_t, u64Value);
6337 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6338 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6339 IEM_MC_ADVANCE_RIP();
6340 IEM_MC_END();
6341 return VINF_SUCCESS;
6342
6343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6344 }
6345 }
6346 else
6347 {
6348 /*
6349 * We're loading a register from memory.
6350 */
6351 switch (pVCpu->iem.s.enmEffOpSize)
6352 {
6353 case IEMMODE_16BIT:
6354 IEM_MC_BEGIN(0, 2);
6355 IEM_MC_LOCAL(uint16_t, u16Value);
6356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6359 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6360 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 return VINF_SUCCESS;
6364
6365 case IEMMODE_32BIT:
6366 IEM_MC_BEGIN(0, 2);
6367 IEM_MC_LOCAL(uint32_t, u32Value);
6368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6371 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6372 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6373 IEM_MC_ADVANCE_RIP();
6374 IEM_MC_END();
6375 return VINF_SUCCESS;
6376
6377 case IEMMODE_64BIT:
6378 IEM_MC_BEGIN(0, 2);
6379 IEM_MC_LOCAL(uint64_t, u64Value);
6380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6383 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6384 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6385 IEM_MC_ADVANCE_RIP();
6386 IEM_MC_END();
6387 return VINF_SUCCESS;
6388
6389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6390 }
6391 }
6392}
6393
6394
6395/** Opcode 0x0f 0xb7. */
6396FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6397{
6398 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6399 IEMOP_HLP_MIN_386();
6400
6401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6402
6403 /** @todo Not entirely sure how the operand size prefix is handled here,
6404 * assuming that it will be ignored. Would be nice to have a few
6405 * tests for this. */
6406 /*
6407 * If rm is denoting a register, no more instruction bytes.
6408 */
6409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6410 {
6411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6412 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6413 {
6414 IEM_MC_BEGIN(0, 1);
6415 IEM_MC_LOCAL(uint32_t, u32Value);
6416 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6417 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6418 IEM_MC_ADVANCE_RIP();
6419 IEM_MC_END();
6420 }
6421 else
6422 {
6423 IEM_MC_BEGIN(0, 1);
6424 IEM_MC_LOCAL(uint64_t, u64Value);
6425 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6426 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6427 IEM_MC_ADVANCE_RIP();
6428 IEM_MC_END();
6429 }
6430 }
6431 else
6432 {
6433 /*
6434 * We're loading a register from memory.
6435 */
6436 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6437 {
6438 IEM_MC_BEGIN(0, 2);
6439 IEM_MC_LOCAL(uint32_t, u32Value);
6440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6443 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6444 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6445 IEM_MC_ADVANCE_RIP();
6446 IEM_MC_END();
6447 }
6448 else
6449 {
6450 IEM_MC_BEGIN(0, 2);
6451 IEM_MC_LOCAL(uint64_t, u64Value);
6452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6455 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6456 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6457 IEM_MC_ADVANCE_RIP();
6458 IEM_MC_END();
6459 }
6460 }
6461 return VINF_SUCCESS;
6462}
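
/*
 * As the @todo above notes, the operand size prefix handling is an
 * assumption here: the 16-bit and 32-bit effective operand sizes are
 * collapsed into the 32-bit path (i.e. a 0x66 prefix is treated as
 * ignored), and only REX.W selects the 64-bit zero-extending path.
 */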
6463
6464
6465/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6466FNIEMOP_UD_STUB(iemOp_jmpe);
6467/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6468FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6469
6470
6471/** Opcode 0x0f 0xb9. */
6472FNIEMOP_DEF(iemOp_Grp10)
6473{
6474 Log(("iemOp_Grp10 -> #UD\n"));
6475 return IEMOP_RAISE_INVALID_OPCODE();
6476}
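
/*
 * 0x0f 0xb9 is the encoding Intel later documented as UD1 (it takes a
 * ModR/M byte), so raising #UD here matches real hardware.
 */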
6477
6478
6479/** Opcode 0x0f 0xba. */
6480FNIEMOP_DEF(iemOp_Grp8)
6481{
6482 IEMOP_HLP_MIN_386();
6483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6484 PCIEMOPBINSIZES pImpl;
6485 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6486 {
6487 case 0: case 1: case 2: case 3:
6488 return IEMOP_RAISE_INVALID_OPCODE();
6489 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6490 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6491 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6492 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6494 }
6495 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6496
6497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6498 {
6499 /* register destination. */
6500 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6502
6503 switch (pVCpu->iem.s.enmEffOpSize)
6504 {
6505 case IEMMODE_16BIT:
6506 IEM_MC_BEGIN(3, 0);
6507 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6508 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6509 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6510
6511 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6512 IEM_MC_REF_EFLAGS(pEFlags);
6513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6514
6515 IEM_MC_ADVANCE_RIP();
6516 IEM_MC_END();
6517 return VINF_SUCCESS;
6518
6519 case IEMMODE_32BIT:
6520 IEM_MC_BEGIN(3, 0);
6521 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6522 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6523 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6524
6525 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6526 IEM_MC_REF_EFLAGS(pEFlags);
6527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6528
6529 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6530 IEM_MC_ADVANCE_RIP();
6531 IEM_MC_END();
6532 return VINF_SUCCESS;
6533
6534 case IEMMODE_64BIT:
6535 IEM_MC_BEGIN(3, 0);
6536 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6537 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6538 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6539
6540 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6541 IEM_MC_REF_EFLAGS(pEFlags);
6542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6543
6544 IEM_MC_ADVANCE_RIP();
6545 IEM_MC_END();
6546 return VINF_SUCCESS;
6547
6548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6549 }
6550 }
6551 else
6552 {
6553 /* memory destination. */
6554
6555 uint32_t fAccess;
6556 if (pImpl->pfnLockedU16)
6557 fAccess = IEM_ACCESS_DATA_RW;
6558 else /* BT */
6559 fAccess = IEM_ACCESS_DATA_R;
6560
6561 /** @todo test negative bit offsets! */
6562 switch (pVCpu->iem.s.enmEffOpSize)
6563 {
6564 case IEMMODE_16BIT:
6565 IEM_MC_BEGIN(3, 1);
6566 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6567 IEM_MC_ARG(uint16_t, u16Src, 1);
6568 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6570
6571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6572 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6573 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6574 if (pImpl->pfnLockedU16)
6575 IEMOP_HLP_DONE_DECODING();
6576 else
6577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6578 IEM_MC_FETCH_EFLAGS(EFlags);
6579 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6580 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6581 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6582 else
6583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6584 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6585
6586 IEM_MC_COMMIT_EFLAGS(EFlags);
6587 IEM_MC_ADVANCE_RIP();
6588 IEM_MC_END();
6589 return VINF_SUCCESS;
6590
6591 case IEMMODE_32BIT:
6592 IEM_MC_BEGIN(3, 1);
6593 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6594 IEM_MC_ARG(uint32_t, u32Src, 1);
6595 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6597
6598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6599 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6600 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6601 if (pImpl->pfnLockedU16)
6602 IEMOP_HLP_DONE_DECODING();
6603 else
6604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6605 IEM_MC_FETCH_EFLAGS(EFlags);
6606 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6607 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6608 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6609 else
6610 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6611 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6612
6613 IEM_MC_COMMIT_EFLAGS(EFlags);
6614 IEM_MC_ADVANCE_RIP();
6615 IEM_MC_END();
6616 return VINF_SUCCESS;
6617
6618 case IEMMODE_64BIT:
6619 IEM_MC_BEGIN(3, 1);
6620 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6621 IEM_MC_ARG(uint64_t, u64Src, 1);
6622 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6624
6625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6626 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6627 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6628 if (pImpl->pfnLockedU16)
6629 IEMOP_HLP_DONE_DECODING();
6630 else
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6632 IEM_MC_FETCH_EFLAGS(EFlags);
6633 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6634 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6636 else
6637 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6638 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6639
6640 IEM_MC_COMMIT_EFLAGS(EFlags);
6641 IEM_MC_ADVANCE_RIP();
6642 IEM_MC_END();
6643 return VINF_SUCCESS;
6644
6645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6646 }
6647 }
6649}
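
/*
 * Unlike the Ev,Gv bit-op worker, the immediate forms above need no
 * effective address adjustment: the imm8 bit offset is masked to the
 * operand width (0x0f/0x1f/0x3f) up front, so it can never address bits
 * outside the operand.  Note also that IEM_MC_CALC_RM_EFF_ADDR is passed an
 * immediate size of 1, presumably so RIP-relative addressing accounts for
 * the trailing imm8.
 */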
6650
6651
6652/** Opcode 0x0f 0xbb. */
6653FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6654{
6655 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6656 IEMOP_HLP_MIN_386();
6657 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6658}
6659
6660
6661/** Opcode 0x0f 0xbc. */
6662FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6663{
6664 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6665 IEMOP_HLP_MIN_386();
6666 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6667 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6668}
6669
6670
6671/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6672FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6673
6674
6675/** Opcode 0x0f 0xbd. */
6676FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6677{
6678 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6679 IEMOP_HLP_MIN_386();
6680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6681 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6682}
6683
6684
6685/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6686FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
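
/*
 * For BSF/BSR above only ZF is architecturally defined (set when the source
 * is zero, in which case the destination is undefined); the other
 * arithmetic flags are declared undefined for the verifier.  The 0xf3
 * prefixed forms decode as TZCNT/LZCNT on CPUs with BMI1/LZCNT support and
 * are still stubs here.
 */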
6687
6688
6689/** Opcode 0x0f 0xbe. */
6690FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6691{
6692 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6693 IEMOP_HLP_MIN_386();
6694
6695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6696
6697 /*
6698 * If rm is denoting a register, no more instruction bytes.
6699 */
6700 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6701 {
6702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6703 switch (pVCpu->iem.s.enmEffOpSize)
6704 {
6705 case IEMMODE_16BIT:
6706 IEM_MC_BEGIN(0, 1);
6707 IEM_MC_LOCAL(uint16_t, u16Value);
6708 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6709 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6710 IEM_MC_ADVANCE_RIP();
6711 IEM_MC_END();
6712 return VINF_SUCCESS;
6713
6714 case IEMMODE_32BIT:
6715 IEM_MC_BEGIN(0, 1);
6716 IEM_MC_LOCAL(uint32_t, u32Value);
6717 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6718 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722
6723 case IEMMODE_64BIT:
6724 IEM_MC_BEGIN(0, 1);
6725 IEM_MC_LOCAL(uint64_t, u64Value);
6726 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6727 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6728 IEM_MC_ADVANCE_RIP();
6729 IEM_MC_END();
6730 return VINF_SUCCESS;
6731
6732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6733 }
6734 }
6735 else
6736 {
6737 /*
6738 * We're loading a register from memory.
6739 */
6740 switch (pVCpu->iem.s.enmEffOpSize)
6741 {
6742 case IEMMODE_16BIT:
6743 IEM_MC_BEGIN(0, 2);
6744 IEM_MC_LOCAL(uint16_t, u16Value);
6745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6748 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6749 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6750 IEM_MC_ADVANCE_RIP();
6751 IEM_MC_END();
6752 return VINF_SUCCESS;
6753
6754 case IEMMODE_32BIT:
6755 IEM_MC_BEGIN(0, 2);
6756 IEM_MC_LOCAL(uint32_t, u32Value);
6757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6760 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6761 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6762 IEM_MC_ADVANCE_RIP();
6763 IEM_MC_END();
6764 return VINF_SUCCESS;
6765
6766 case IEMMODE_64BIT:
6767 IEM_MC_BEGIN(0, 2);
6768 IEM_MC_LOCAL(uint64_t, u64Value);
6769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6772 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6773 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6774 IEM_MC_ADVANCE_RIP();
6775 IEM_MC_END();
6776 return VINF_SUCCESS;
6777
6778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6779 }
6780 }
6781}
6782
6783
6784/** Opcode 0x0f 0xbf. */
6785FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6786{
6787 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6788 IEMOP_HLP_MIN_386();
6789
6790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6791
6792 /** @todo Not entirely sure how the operand size prefix is handled here,
6793 * assuming that it will be ignored. Would be nice to have a few
6794 * tests for this. */
6795 /*
6796 * If rm is denoting a register, no more instruction bytes.
6797 */
6798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6799 {
6800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6801 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6802 {
6803 IEM_MC_BEGIN(0, 1);
6804 IEM_MC_LOCAL(uint32_t, u32Value);
6805 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6806 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6807 IEM_MC_ADVANCE_RIP();
6808 IEM_MC_END();
6809 }
6810 else
6811 {
6812 IEM_MC_BEGIN(0, 1);
6813 IEM_MC_LOCAL(uint64_t, u64Value);
6814 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6815 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6816 IEM_MC_ADVANCE_RIP();
6817 IEM_MC_END();
6818 }
6819 }
6820 else
6821 {
6822 /*
6823 * We're loading a register from memory.
6824 */
6825 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6826 {
6827 IEM_MC_BEGIN(0, 2);
6828 IEM_MC_LOCAL(uint32_t, u32Value);
6829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6832 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6833 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6834 IEM_MC_ADVANCE_RIP();
6835 IEM_MC_END();
6836 }
6837 else
6838 {
6839 IEM_MC_BEGIN(0, 2);
6840 IEM_MC_LOCAL(uint64_t, u64Value);
6841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6844 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6845 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6846 IEM_MC_ADVANCE_RIP();
6847 IEM_MC_END();
6848 }
6849 }
6850 return VINF_SUCCESS;
6851}
6852
6853
6854/** Opcode 0x0f 0xc0. */
6855FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6856{
6857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6858 IEMOP_HLP_MIN_486();
6859 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6860
6861 /*
6862 * If rm is denoting a register, no more instruction bytes.
6863 */
6864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6865 {
6866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6867
6868 IEM_MC_BEGIN(3, 0);
6869 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6870 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6872
6873 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6874 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6875 IEM_MC_REF_EFLAGS(pEFlags);
6876 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6877
6878 IEM_MC_ADVANCE_RIP();
6879 IEM_MC_END();
6880 }
6881 else
6882 {
6883 /*
6884 * We're accessing memory.
6885 */
6886 IEM_MC_BEGIN(3, 3);
6887 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6888 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6889 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6890 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6892
6893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6894 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6895 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6896 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6897 IEM_MC_FETCH_EFLAGS(EFlags);
6898 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6899 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6900 else
6901 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6902
6903 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6904 IEM_MC_COMMIT_EFLAGS(EFlags);
6905 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6906 IEM_MC_ADVANCE_RIP();
6907 IEM_MC_END();
6908 return VINF_SUCCESS;
6909 }
6910 return VINF_SUCCESS;
6911}
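
/*
 * XADD semantics implemented by the workers above: the register operand
 * receives the old destination value and the destination receives the sum,
 * with the EFLAGS of the addition.  In the memory form the register
 * write-back happens from the u8RegCopy local only after the memory commit,
 * presumably so a fault on the commit leaves the register unmodified.  A
 * minimal sketch under that reading (hypothetical helper; flag computation
 * omitted):
 */
#if 0 /* illustrative sketch, not part of the emulator */
static void iemSketchXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8Old = *pu8Dst;
    *pu8Dst = (uint8_t)(u8Old + *pu8Reg);   /* destination <- old + register (ADD flags computed here) */
    *pu8Reg = u8Old;                        /* register    <- old destination value */
}
#endif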
6912
6913
6914/** Opcode 0x0f 0xc1. */
6915FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6916{
6917 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6918 IEMOP_HLP_MIN_486();
6919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6920
6921 /*
6922 * If rm is denoting a register, no more instruction bytes.
6923 */
6924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6925 {
6926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6927
6928 switch (pVCpu->iem.s.enmEffOpSize)
6929 {
6930 case IEMMODE_16BIT:
6931 IEM_MC_BEGIN(3, 0);
6932 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6933 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6934 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6935
6936 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6937 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6938 IEM_MC_REF_EFLAGS(pEFlags);
6939 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6940
6941 IEM_MC_ADVANCE_RIP();
6942 IEM_MC_END();
6943 return VINF_SUCCESS;
6944
6945 case IEMMODE_32BIT:
6946 IEM_MC_BEGIN(3, 0);
6947 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6948 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6949 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6950
6951 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6952 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6953 IEM_MC_REF_EFLAGS(pEFlags);
6954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6955
6956 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6957 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961
6962 case IEMMODE_64BIT:
6963 IEM_MC_BEGIN(3, 0);
6964 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6965 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6966 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6967
6968 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6969 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6970 IEM_MC_REF_EFLAGS(pEFlags);
6971 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6972
6973 IEM_MC_ADVANCE_RIP();
6974 IEM_MC_END();
6975 return VINF_SUCCESS;
6976
6977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6978 }
6979 }
6980 else
6981 {
6982 /*
6983 * We're accessing memory.
6984 */
6985 switch (pVCpu->iem.s.enmEffOpSize)
6986 {
6987 case IEMMODE_16BIT:
6988 IEM_MC_BEGIN(3, 3);
6989 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6990 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6991 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6992 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6994
6995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6996 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6997 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6998 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6999 IEM_MC_FETCH_EFLAGS(EFlags);
7000 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7002 else
7003 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7004
7005 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7006 IEM_MC_COMMIT_EFLAGS(EFlags);
7007 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 return VINF_SUCCESS;
7011
7012 case IEMMODE_32BIT:
7013 IEM_MC_BEGIN(3, 3);
7014 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7015 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7016 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7017 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7019
7020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7021 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7022 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7023 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7024 IEM_MC_FETCH_EFLAGS(EFlags);
7025 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7026 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7027 else
7028 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7029
7030 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7031 IEM_MC_COMMIT_EFLAGS(EFlags);
7032 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 return VINF_SUCCESS;
7036
7037 case IEMMODE_64BIT:
7038 IEM_MC_BEGIN(3, 3);
7039 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7040 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7041 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7042 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7044
7045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7046 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7047 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7048 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7049 IEM_MC_FETCH_EFLAGS(EFlags);
7050 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7051 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7052 else
7053 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7054
7055 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7056 IEM_MC_COMMIT_EFLAGS(EFlags);
7057 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7058 IEM_MC_ADVANCE_RIP();
7059 IEM_MC_END();
7060 return VINF_SUCCESS;
7061
7062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7063 }
7064 }
7065}
7066
7067
7068/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7069FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7070/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7071FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7072/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7073FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7074/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7075FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7076
7077
7078/** Opcode 0x0f 0xc3. */
7079FNIEMOP_DEF(iemOp_movnti_My_Gy)
7080{
7081 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7082
7083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7084
7085 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7086 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7087 {
7088 switch (pVCpu->iem.s.enmEffOpSize)
7089 {
7090 case IEMMODE_32BIT:
7091 IEM_MC_BEGIN(0, 2);
7092 IEM_MC_LOCAL(uint32_t, u32Value);
7093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7094
7095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7097 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7098 return IEMOP_RAISE_INVALID_OPCODE();
7099
7100 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7101 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7102 IEM_MC_ADVANCE_RIP();
7103 IEM_MC_END();
7104 break;
7105
7106 case IEMMODE_64BIT:
7107 IEM_MC_BEGIN(0, 2);
7108 IEM_MC_LOCAL(uint64_t, u64Value);
7109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7110
7111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7113 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7114 return IEMOP_RAISE_INVALID_OPCODE();
7115
7116 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7117 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7118 IEM_MC_ADVANCE_RIP();
7119 IEM_MC_END();
7120 break;
7121
7122 case IEMMODE_16BIT:
7123 /** @todo check this form. */
7124 return IEMOP_RAISE_INVALID_OPCODE();
7125 }
7126 }
7127 else
7128 return IEMOP_RAISE_INVALID_OPCODE();
7129 return VINF_SUCCESS;
7130}
7131/* Opcode 0x66 0x0f 0xc3 - invalid */
7132/* Opcode 0xf3 0x0f 0xc3 - invalid */
7133/* Opcode 0xf2 0x0f 0xc3 - invalid */
7134
7135/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7136FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7137/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7138FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7139/* Opcode 0xf3 0x0f 0xc4 - invalid */
7140/* Opcode 0xf2 0x0f 0xc4 - invalid */
7141
7142/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7143FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7144/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7145FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7146/* Opcode 0xf3 0x0f 0xc5 - invalid */
7147/* Opcode 0xf2 0x0f 0xc5 - invalid */
7148
7149/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7150FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7151/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7152FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7153/* Opcode 0xf3 0x0f 0xc6 - invalid */
7154/* Opcode 0xf2 0x0f 0xc6 - invalid */
7155
7156
7157/** Opcode 0x0f 0xc7 !11/1. */
7158FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7159{
7160 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7161
7162 IEM_MC_BEGIN(4, 3);
7163 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7164 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7165 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7166 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7167 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7168 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7170
7171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7172 IEMOP_HLP_DONE_DECODING();
7173 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7174
7175 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7176 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7177 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7178
7179 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7180 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7181 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7182
7183 IEM_MC_FETCH_EFLAGS(EFlags);
7184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7185 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7186 else
7187 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7188
7189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7190 IEM_MC_COMMIT_EFLAGS(EFlags);
7191 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7192 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7193 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7194 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7195 IEM_MC_ENDIF();
7196 IEM_MC_ADVANCE_RIP();
7197
7198 IEM_MC_END();
7199 return VINF_SUCCESS;
7200}
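

/*
 * For reference, the CMPXCHG8B semantics that the iemAImpl_cmpxchg8b workers
 * implement (atomically in the locked variant) can be sketched in plain C as
 * below.  This is a simplified illustration under assumed names, not the
 * real (assembly) implementation.
 */
#if 0
static void iemAImplSketch_cmpxchg8b(uint64_t *pu64MemDst, PRTUINT64U pu64EaxEdx,
                                     PRTUINT64U pu64EbxEcx, uint32_t *pEFlags)
{
    if (*pu64MemDst == pu64EaxEdx->u)
    {
        *pu64MemDst = pu64EbxEcx->u;    /* Equal: store ECX:EBX and set ZF. */
        *pEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64MemDst;    /* Not equal: load the memory value into EDX:EAX, clear ZF. */
        *pEFlags &= ~X86_EFL_ZF;
    }
}
#endif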
7201
7202
7203/** Opcode REX.W 0x0f 0xc7 !11/1. */
7204FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7205{
7206 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7207 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7208 {
7209#if 0
7210 RT_NOREF(bRm);
7211 IEMOP_BITCH_ABOUT_STUB();
7212 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7213#else
7214 IEM_MC_BEGIN(4, 3);
7215 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7216 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7217 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7218 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7219 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7220 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7222
7223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7224 IEMOP_HLP_DONE_DECODING();
7225 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7226 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7227
7228 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7229 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7230 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7231
7232 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7233 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7234 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7235
7236 IEM_MC_FETCH_EFLAGS(EFlags);
7237# ifdef RT_ARCH_AMD64
7238 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7239 {
7240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7241 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7242 else
7243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7244 }
7245 else
7246# endif
7247 {
7248                /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
7249                         accesses and is not atomic, which works fine in a uni-CPU guest
7250 configuration (ignoring DMA). If guest SMP is active we have no choice
7251 but to use a rendezvous callback here. Sigh. */
7252 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7253 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7254 else
7255 {
7256 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7257 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7258 }
7259 }
7260
7261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7262 IEM_MC_COMMIT_EFLAGS(EFlags);
7263 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7264 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7265 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7266 IEM_MC_ENDIF();
7267 IEM_MC_ADVANCE_RIP();
7268
7269 IEM_MC_END();
7270 return VINF_SUCCESS;
7271#endif
7272 }
7273 Log(("cmpxchg16b -> #UD\n"));
7274 return IEMOP_RAISE_INVALID_OPCODE();
7275}
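

/*
 * A sketch of what the non-atomic cmpxchg16b fallback mentioned above has to
 * do: compare and exchange the two 64-bit halves with separate accesses.
 * The function below is an illustrative assumption (names and all), not the
 * actual fallback worker; it merely shows why the operation is unsafe
 * without a rendezvous when more than one guest CPU can touch the memory.
 */
#if 0
static void iemAImplSketch_cmpxchg16b_fallback(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
                                               PRTUINT128U pu128RbxRcx, uint32_t *pEFlags)
{
    if (   pu128Dst->s.Lo == pu128RaxRdx->s.Lo
        && pu128Dst->s.Hi == pu128RaxRdx->s.Hi)
    {
        pu128Dst->s.Lo = pu128RbxRcx->s.Lo; /* Two separate stores: another CPU could observe a half-updated value. */
        pu128Dst->s.Hi = pu128RbxRcx->s.Hi;
        *pEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu128RaxRdx->s.Lo = pu128Dst->s.Lo; /* Mismatch: return the current memory value in RDX:RAX, clear ZF. */
        pu128RaxRdx->s.Hi = pu128Dst->s.Hi;
        *pEFlags &= ~X86_EFL_ZF;
    }
}
#endif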
7276
7277
7278/** Opcode 0x0f 0xc7 11/6. */
7279FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7280
7281/** Opcode 0x0f 0xc7 !11/6. */
7282FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7283
7284/** Opcode 0x66 0x0f 0xc7 !11/6. */
7285FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7286
7287/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7288FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7289
7290/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7291FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7292
7293
7294/** Opcode 0x0f 0xc7. */
7295FNIEMOP_DEF(iemOp_Grp9)
7296{
7297 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7299 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7300 {
7301 case 0: case 2: case 3: case 4: case 5:
7302 return IEMOP_RAISE_INVALID_OPCODE();
7303 case 1:
7304 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7305 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7306 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7307 return IEMOP_RAISE_INVALID_OPCODE();
7308 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7309 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7310 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7311 case 6:
7312 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7313 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7314 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7315 {
7316 case 0:
7317 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7318 case IEM_OP_PRF_SIZE_OP:
7319 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7320 case IEM_OP_PRF_REPZ:
7321 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7322 default:
7323 return IEMOP_RAISE_INVALID_OPCODE();
7324 }
7325 case 7:
7326 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7327 {
7328 case 0:
7329 case IEM_OP_PRF_REPZ:
7330 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7331 default:
7332 return IEMOP_RAISE_INVALID_OPCODE();
7333 }
7334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7335 }
7336}
7337
7338
7339/**
7340 * Common 'bswap register' helper.
7341 */
7342FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7343{
7344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7345 switch (pVCpu->iem.s.enmEffOpSize)
7346 {
7347 case IEMMODE_16BIT:
7348 IEM_MC_BEGIN(1, 0);
7349 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7350 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7351 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7352 IEM_MC_ADVANCE_RIP();
7353 IEM_MC_END();
7354 return VINF_SUCCESS;
7355
7356 case IEMMODE_32BIT:
7357 IEM_MC_BEGIN(1, 0);
7358 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7359 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7360 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7361 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7362 IEM_MC_ADVANCE_RIP();
7363 IEM_MC_END();
7364 return VINF_SUCCESS;
7365
7366 case IEMMODE_64BIT:
7367 IEM_MC_BEGIN(1, 0);
7368 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7369 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7370 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7371 IEM_MC_ADVANCE_RIP();
7372 IEM_MC_END();
7373 return VINF_SUCCESS;
7374
7375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7376 }
7377}
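

/*
 * For reference, a bswap worker simply reverses the byte order of the
 * operand; a plain C sketch of the 32-bit case is given below (illustrative
 * only -- the real workers are in the assembly helper files, and the 16-bit
 * form has undefined results on real hardware).
 */
#if 0
static void iemAImplSketch_bswap_u32(uint32_t *pu32Dst)
{
    uint32_t const u32 = *pu32Dst;
    *pu32Dst = (u32 << 24)
             | ((u32 & UINT32_C(0x0000ff00)) << 8)
             | ((u32 >> 8) & UINT32_C(0x0000ff00))
             | (u32 >> 24);
}
#endif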
7378
7379
7380/** Opcode 0x0f 0xc8. */
7381FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7382{
7383 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7384    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7385             prefix, but it appears REX.B is the correct prefix.  For a parallel
7386 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7387 IEMOP_HLP_MIN_486();
7388 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7389}
7390
7391
7392/** Opcode 0x0f 0xc9. */
7393FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7394{
7395 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7396 IEMOP_HLP_MIN_486();
7397 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7398}
7399
7400
7401/** Opcode 0x0f 0xca. */
7402FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7403{
7404    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7405 IEMOP_HLP_MIN_486();
7406 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7407}
7408
7409
7410/** Opcode 0x0f 0xcb. */
7411FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7412{
7413    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7414 IEMOP_HLP_MIN_486();
7415 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7416}
7417
7418
7419/** Opcode 0x0f 0xcc. */
7420FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7421{
7422 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7423 IEMOP_HLP_MIN_486();
7424 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7425}
7426
7427
7428/** Opcode 0x0f 0xcd. */
7429FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7430{
7431 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7432 IEMOP_HLP_MIN_486();
7433 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7434}
7435
7436
7437/** Opcode 0x0f 0xce. */
7438FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7439{
7440 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7441 IEMOP_HLP_MIN_486();
7442 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7443}
7444
7445
7446/** Opcode 0x0f 0xcf. */
7447FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7448{
7449 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7450 IEMOP_HLP_MIN_486();
7451 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7452}
7453
7454
7455/* Opcode 0x0f 0xd0 - invalid */
7456/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7457FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7458/* Opcode 0xf3 0x0f 0xd0 - invalid */
7459/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7460FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7461
7462/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7463FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7464/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7465FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7466/* Opcode 0xf3 0x0f 0xd1 - invalid */
7467/* Opcode 0xf2 0x0f 0xd1 - invalid */
7468
7469/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7470FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7471/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7472FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7473/* Opcode 0xf3 0x0f 0xd2 - invalid */
7474/* Opcode 0xf2 0x0f 0xd2 - invalid */
7475
7476/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7477FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7478/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7479FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7480/* Opcode 0xf3 0x0f 0xd3 - invalid */
7481/* Opcode 0xf2 0x0f 0xd3 - invalid */
7482
7483/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7484FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7485/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7486FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7487/* Opcode 0xf3 0x0f 0xd4 - invalid */
7488/* Opcode 0xf2 0x0f 0xd4 - invalid */
7489
7490/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7491FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7492/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7493FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7494/* Opcode 0xf3 0x0f 0xd5 - invalid */
7495/* Opcode 0xf2 0x0f 0xd5 - invalid */
7496
7497/* Opcode 0x0f 0xd6 - invalid */
7498/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7499FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7500/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7501FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7502/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7503FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7504#if 0
7505FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7506{
7507    /* The docs say register only. */
7508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7509
7510 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7511 {
7512 case IEM_OP_PRF_SIZE_OP: /* SSE */
7513 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7514 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7515 IEM_MC_BEGIN(2, 0);
7516 IEM_MC_ARG(uint64_t *, pDst, 0);
7517 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7518 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7519 IEM_MC_PREPARE_SSE_USAGE();
7520 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7521 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7522 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7523 IEM_MC_ADVANCE_RIP();
7524 IEM_MC_END();
7525 return VINF_SUCCESS;
7526
7527 case 0: /* MMX */
7528            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7529 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7530 IEM_MC_BEGIN(2, 0);
7531 IEM_MC_ARG(uint64_t *, pDst, 0);
7532 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7533 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7534 IEM_MC_PREPARE_FPU_USAGE();
7535 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7536 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7537 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7538 IEM_MC_ADVANCE_RIP();
7539 IEM_MC_END();
7540 return VINF_SUCCESS;
7541
7542 default:
7543 return IEMOP_RAISE_INVALID_OPCODE();
7544 }
7545}
7546#endif
7547
7548
7549/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7550FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7551{
7552    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7553 /** @todo testcase: Check that the instruction implicitly clears the high
7554 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7555 * and opcode modifications are made to work with the whole width (not
7556 * just 128). */
7557    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7558    /* The docs say register only. */
7559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7561 {
7562 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7563 IEM_MC_BEGIN(2, 0);
7564 IEM_MC_ARG(uint64_t *, pDst, 0);
7565 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7566 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7567 IEM_MC_PREPARE_FPU_USAGE();
7568 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7569 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7570 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7571 IEM_MC_ADVANCE_RIP();
7572 IEM_MC_END();
7573 return VINF_SUCCESS;
7574 }
7575 return IEMOP_RAISE_INVALID_OPCODE();
7576}
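

/*
 * For reference, pmovmskb gathers the most significant bit of each source
 * byte into the low bits of the destination register.  A plain C sketch of
 * the 64-bit (MMX) worker could look like this; the name is an assumption
 * and the real implementation lives in the assembly helper files.
 */
#if 0
static void iemAImplSketch_pmovmskb_u64(uint64_t *pDst, uint64_t const *pSrc)
{
    uint64_t const uSrc  = *pSrc;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;  /* MSB of byte N -> bit N. */
    *pDst = fMask;                                          /* Bits 63:8 end up zero. */
}
#endif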
7577
7578/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7579FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7580{
7581    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7582 /** @todo testcase: Check that the instruction implicitly clears the high
7583 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7584 * and opcode modifications are made to work with the whole width (not
7585 * just 128). */
7586    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7587    /* The docs say register only. */
7588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7590 {
7591 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7592 IEM_MC_BEGIN(2, 0);
7593 IEM_MC_ARG(uint64_t *, pDst, 0);
7594 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7596 IEM_MC_PREPARE_SSE_USAGE();
7597 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7598 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7599 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7600 IEM_MC_ADVANCE_RIP();
7601 IEM_MC_END();
7602 return VINF_SUCCESS;
7603 }
7604 return IEMOP_RAISE_INVALID_OPCODE();
7605}
7606
7607/* Opcode 0xf3 0x0f 0xd7 - invalid */
7608/* Opcode 0xf2 0x0f 0xd7 - invalid */
7609
7610
7611/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7612FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7613/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7614FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7615/* Opcode 0xf3 0x0f 0xd8 - invalid */
7616/* Opcode 0xf2 0x0f 0xd8 - invalid */
7617
7618/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7619FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7620/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7621FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7622/* Opcode 0xf3 0x0f 0xd9 - invalid */
7623/* Opcode 0xf2 0x0f 0xd9 - invalid */
7624
7625/** Opcode 0x0f 0xda - pminub Pq, Qq */
7626FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7627/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7628FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7629/* Opcode 0xf3 0x0f 0xda - invalid */
7630/* Opcode 0xf2 0x0f 0xda - invalid */
7631
7632/** Opcode 0x0f 0xdb - pand Pq, Qq */
7633FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7634/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7635FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7636/* Opcode 0xf3 0x0f 0xdb - invalid */
7637/* Opcode 0xf2 0x0f 0xdb - invalid */
7638
7639/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7640FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7641/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7642FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7643/* Opcode 0xf3 0x0f 0xdc - invalid */
7644/* Opcode 0xf2 0x0f 0xdc - invalid */
7645
7646/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7647FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7648/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7649FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7650/* Opcode 0xf3 0x0f 0xdd - invalid */
7651/* Opcode 0xf2 0x0f 0xdd - invalid */
7652
7653/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7654FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7655/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7656FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7657/* Opcode 0xf3 0x0f 0xde - invalid */
7658/* Opcode 0xf2 0x0f 0xde - invalid */
7659
7660/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7661FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7662/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7663FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7664/* Opcode 0xf3 0x0f 0xdf - invalid */
7665/* Opcode 0xf2 0x0f 0xdf - invalid */
7666
7667/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7668FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7669/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7670FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7671/* Opcode 0xf3 0x0f 0xe0 - invalid */
7672/* Opcode 0xf2 0x0f 0xe0 - invalid */
7673
7674/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7675FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7676/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7677FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7678/* Opcode 0xf3 0x0f 0xe1 - invalid */
7679/* Opcode 0xf2 0x0f 0xe1 - invalid */
7680
7681/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7682FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7683/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7684FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7685/* Opcode 0xf3 0x0f 0xe2 - invalid */
7686/* Opcode 0xf2 0x0f 0xe2 - invalid */
7687
7688/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7689FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7690/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7691FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7692/* Opcode 0xf3 0x0f 0xe3 - invalid */
7693/* Opcode 0xf2 0x0f 0xe3 - invalid */
7694
7695/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7696FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7697/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7698FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7699/* Opcode 0xf3 0x0f 0xe4 - invalid */
7700/* Opcode 0xf2 0x0f 0xe4 - invalid */
7701
7702/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7703FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7704/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7705FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7706/* Opcode 0xf3 0x0f 0xe5 - invalid */
7707/* Opcode 0xf2 0x0f 0xe5 - invalid */
7708
7709/* Opcode 0x0f 0xe6 - invalid */
7710/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7711FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7712/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7713FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7714/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7715FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7716
7717
7718/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7719FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7720{
7721 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7723 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7724 {
7725 /* Register, memory. */
7726 IEM_MC_BEGIN(0, 2);
7727 IEM_MC_LOCAL(uint64_t, uSrc);
7728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7729
7730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7732 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7733 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7734
7735 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7736 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7737
7738 IEM_MC_ADVANCE_RIP();
7739 IEM_MC_END();
7740 return VINF_SUCCESS;
7741 }
7742 /* The register, register encoding is invalid. */
7743 return IEMOP_RAISE_INVALID_OPCODE();
7744}
7745
7746/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7747FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7748{
7749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7750 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7751 {
7752 /* Register, memory. */
7753 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7754 IEM_MC_BEGIN(0, 2);
7755 IEM_MC_LOCAL(RTUINT128U, uSrc);
7756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7757
7758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7760 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7761 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7762
7763 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7764 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7765
7766 IEM_MC_ADVANCE_RIP();
7767 IEM_MC_END();
7768 return VINF_SUCCESS;
7769 }
7770
7771 /* The register, register encoding is invalid. */
7772 return IEMOP_RAISE_INVALID_OPCODE();
7773}
7774
7775/* Opcode 0xf3 0x0f 0xe7 - invalid */
7776/* Opcode 0xf2 0x0f 0xe7 - invalid */
7777
7778
7779/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7780FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7781/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7782FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7783/* Opcode 0xf3 0x0f 0xe8 - invalid */
7784/* Opcode 0xf2 0x0f 0xe8 - invalid */
7785
7786/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7787FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7788/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7789FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7790/* Opcode 0xf3 0x0f 0xe9 - invalid */
7791/* Opcode 0xf2 0x0f 0xe9 - invalid */
7792
7793/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7794FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7795/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7796FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7797/* Opcode 0xf3 0x0f 0xea - invalid */
7798/* Opcode 0xf2 0x0f 0xea - invalid */
7799
7800/** Opcode 0x0f 0xeb - por Pq, Qq */
7801FNIEMOP_STUB(iemOp_por_Pq_Qq);
7802/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7803FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7804/* Opcode 0xf3 0x0f 0xeb - invalid */
7805/* Opcode 0xf2 0x0f 0xeb - invalid */
7806
7807/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7808FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7809/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7810FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7811/* Opcode 0xf3 0x0f 0xec - invalid */
7812/* Opcode 0xf2 0x0f 0xec - invalid */
7813
7814/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7815FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7816/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7817FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7818/* Opcode 0xf3 0x0f 0xed - invalid */
7819/* Opcode 0xf2 0x0f 0xed - invalid */
7820
7821/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7822FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7823/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7824FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7825/* Opcode 0xf3 0x0f 0xee - invalid */
7826/* Opcode 0xf2 0x0f 0xee - invalid */
7827
7828
7829/** Opcode 0x0f 0xef - pxor Pq, Qq */
7830FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7831{
7832 IEMOP_MNEMONIC(pxor, "pxor");
7833 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7834}
7835
7836/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7837FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7838{
7839 IEMOP_MNEMONIC(vpxor, "vpxor");
7840 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7841}
7842
7843/* Opcode 0xf3 0x0f 0xef - invalid */
7844/* Opcode 0xf2 0x0f 0xef - invalid */
7845
7846/* Opcode 0x0f 0xf0 - invalid */
7847/* Opcode 0x66 0x0f 0xf0 - invalid */
7848/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7849FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7850
7851/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7852FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7853/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7854FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7855/* Opcode 0xf2 0x0f 0xf1 - invalid */
7856
7857/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7858FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7859/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7860FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7861/* Opcode 0xf2 0x0f 0xf2 - invalid */
7862
7863/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7864FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7865/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7866FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7867/* Opcode 0xf2 0x0f 0xf3 - invalid */
7868
7869/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7870FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7871/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7872FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7873/* Opcode 0xf2 0x0f 0xf4 - invalid */
7874
7875/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7876FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7877/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7878FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7879/* Opcode 0xf2 0x0f 0xf5 - invalid */
7880
7881/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7882FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7883/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7884FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7885/* Opcode 0xf2 0x0f 0xf6 - invalid */
7886
7887/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7888FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7889/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7890FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7891/* Opcode 0xf2 0x0f 0xf7 - invalid */
7892
7893/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7894FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7895/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7896FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7897/* Opcode 0xf2 0x0f 0xf8 - invalid */
7898
7899/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7900FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7901/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7902FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7903/* Opcode 0xf2 0x0f 0xf9 - invalid */
7904
7905/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7906FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7907/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7908FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7909/* Opcode 0xf2 0x0f 0xfa - invalid */
7910
7911/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7912FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7913/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7914FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7915/* Opcode 0xf2 0x0f 0xfb - invalid */
7916
7917/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7918FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7919/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7920FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7921/* Opcode 0xf2 0x0f 0xfc - invalid */
7922
7923/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7924FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7925/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7926FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7927/* Opcode 0xf2 0x0f 0xfd - invalid */
7928
7929/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7930FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7931/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7932FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7933/* Opcode 0xf2 0x0f 0xfe - invalid */
7934
7935
7936/** Opcode **** 0x0f 0xff - UD0 */
7937FNIEMOP_DEF(iemOp_ud0)
7938{
7939 IEMOP_MNEMONIC(ud0, "ud0");
7940 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7941 {
7942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7943#ifndef TST_IEM_CHECK_MC
7944 RTGCPTR GCPtrEff;
7945 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7946 if (rcStrict != VINF_SUCCESS)
7947 return rcStrict;
7948#endif
7949 IEMOP_HLP_DONE_DECODING();
7950 }
7951 return IEMOP_RAISE_INVALID_OPCODE();
7952}
7953
7954
7955
7956/**
7957 * Two byte opcode map, first byte 0x0f.
7958 *
7959 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7960 * check if it needs updating as well when making changes.
7961 */
7962IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7963{
7964    /*          no prefix,           066h prefix,          f3h prefix,           f2h prefix */
7965 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7966 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7967 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7968 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7969 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7970 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7971 /* 0x06 */ IEMOP_X4(iemOp_clts),
7972 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7973 /* 0x08 */ IEMOP_X4(iemOp_invd),
7974 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7975 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7976 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7977 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7978 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7979 /* 0x0e */ IEMOP_X4(iemOp_femms),
7980 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7981
7982 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7983 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7984 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7985 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7986 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7987 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7988 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7989 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7990 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7991 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7992 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7993 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7994 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7995 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7996 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7997 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7998
7999 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8000 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8001 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8002 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8003 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8004 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8005 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8006 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8007 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8008 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8009 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8010 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8012 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8013 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8014 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8015
8016 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8017 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8018 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8019 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8020 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8021 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8022 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8023 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8024 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8025 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8026 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8027 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8028 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8029 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8030 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8031 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8032
8033 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8034 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8035 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8036 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8037 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8038 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8039 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8040 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8041 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8042 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8043 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8044 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8045 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8046 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8047 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8048 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8049
8050 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8051 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8052 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8053 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8054 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8055 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8056 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8057 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8058 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8059 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8060 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8061 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8062 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8063 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8064 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8065 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8066
8067 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8068 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8069 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8070 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8071 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8072 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8073 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8074 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8075 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8076 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8077 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8078 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8079 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8080 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8081 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8082 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8083
8084 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8085 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8086 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8087 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8088 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8089 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8090 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8091 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8092
8093 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8094 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8095 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8096 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8097 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8098 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8099 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8100 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8101
8102 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8103 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8104 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8105 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8106 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8107 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8108 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8109 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8110 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8111 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8112 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8113 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8114 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8115 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8116 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8117 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8118
8119 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8120 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8121 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8122 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8123 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8124 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8125 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8126 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8127 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8128 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8129 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8130 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8131 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8132 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8133 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8134 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8135
8136 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8137 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8138 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8139 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8140 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8141 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8142 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8143 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8144 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8145 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8146 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8147 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8148 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8149 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8150 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8151 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8152
8153 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8154 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8155 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8156 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8157 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8158 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8159 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8160 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8161 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8162 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8163 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8164 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8165 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8166 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8167 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8168 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8169
8170 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8171 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8172 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8173 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8174 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8175 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8176    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8177 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8178 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8179 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8180 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8181 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8182 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8183 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8184 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8185 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8186
8187 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8188 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8189 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8190 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8191 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8192 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8193 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8194 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8195 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8196 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8197 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8198 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8199 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8200 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8201 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8202 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8203
8204 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8205 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8206 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8208 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8211 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8216 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8217 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8219 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8220
8221 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8222 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8223 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8229 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8230 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8234 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0xff */ IEMOP_X4(iemOp_ud0),
8237};
8238AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
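

/*
 * Note: the table above holds four entries per opcode byte -- no prefix,
 * 0x66, 0xf3 and 0xf2, in that order -- which is why it has 1024 entries.
 * A sketch of how a dispatcher would index it is shown below; the helper
 * name and the prefix precedence used here are assumptions for the sake of
 * illustration, and the real dispatch code lives elsewhere.
 */
#if 0
static PFNIEMOP iemSketchLookupTwoByte(PVMCPU pVCpu, uint8_t bOpcode)
{
    uintptr_t idxPrefix = 0;                                /* No SIMD prefix. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP)
        idxPrefix = 1;                                      /* 0x66 */
    else if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
        idxPrefix = 2;                                      /* 0xf3 */
    else if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
        idxPrefix = 3;                                      /* 0xf2 */
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif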
8239
8240
8241/**
8242 * VEX opcode map \#1.
8243 *
8244 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
8245 *          it needs updating too when making changes.
8246 */
8247IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
8248{
8249    /*          no prefix,           066h prefix,          f3h prefix,           f2h prefix */
8250 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
8251 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
8252 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
8253 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
8254 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
8255 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
8256 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
8257 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
8258 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
8259 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
8260 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
8261 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
8262 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
8263 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
8264 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
8265 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
8266
8267 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8268 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8269 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8270 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8271 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8272 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8273 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8274 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8275 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
8276 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
8277 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
8278 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
8279 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
8280 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
8281 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
8282 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
8283
8284 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
8285 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
8286 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
8287 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
8288 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
8289 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
8290 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
8291 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
8292 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8293 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8294 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8295 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8296 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8297 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8298 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8299 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8300
8301 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
8302 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
8303 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
8304 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
8305 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
8306 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
8307 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
8308 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
8309 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8310 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8311 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8312 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8313 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8314 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8315 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8316 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8317
8318 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8319 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8320 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8321 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8322 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8323 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8324 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8325 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8326 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8327 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8328 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8329 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8330 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8331 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8332 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8333 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8334
8335 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8336 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8337 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8338 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8339 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8340 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8341 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8342 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8343 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8344 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8345 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8346 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8347 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8348 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8349 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8350 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8351
8352 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8353 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8354 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8355 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8356 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8357 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8358 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8359 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8360 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8361 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8362 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8363 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8364 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8365 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8366 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8367 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8368
8369 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8370 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8371 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8372 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8373 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8374 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8375 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8376 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8377 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8378 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8379 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8380 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8381 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8382 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8383 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8384 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8385
8386 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8387 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8388 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8389 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8390 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8391 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8392 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8393 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8394 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8395 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8396 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8397 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8398 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8399 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8400 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
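
/*
 * Editor's illustrative sketch, not part of the original sources: with VEX
 * encodings the column is selected not by legacy prefix bytes but by the
 * 'pp' field (bits 1:0) of the last VEX payload byte, with the same meaning
 * as the columns above (0=none, 1=66h, 2=F3h, 3=F2h).  A lookup could thus
 * be derived like this (bVexPp is assumed to hold that last VEX byte):
 */
#if 0 /* illustration only */
static PFNIEMOP iemVexMap1LookupSketch(uint8_t bOpcode, uint8_t bVexPp)
{
    uint8_t const idxPrefix = bVexPp & 0x3; /* VEX.pp -> column index 0..3 */
    return g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif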
/** @} */