VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66315

Last change on this file since 66315 was 66315, checked in by vboxsync, 8 years ago

IEM: Implemented movsldup Vdq,Wdq (f3 0f 12)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 316.0 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66315 2017-03-28 21:34:58Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/** @name ..... opcodes.
26 *
27 * @{
28 */
29
30/** @} */
31
32
33/** @name Two byte opcodes (first byte 0x0f).
34 *
35 * @{
36 */
37
38/** Opcode 0x0f 0x00 /0. */
39FNIEMOPRM_DEF(iemOp_Grp6_sldt)
40{
41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
42 IEMOP_HLP_MIN_286();
43 IEMOP_HLP_NO_REAL_OR_V86_MODE();
44
45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
46 {
47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
48 switch (pVCpu->iem.s.enmEffOpSize)
49 {
50 case IEMMODE_16BIT:
51 IEM_MC_BEGIN(0, 1);
52 IEM_MC_LOCAL(uint16_t, u16Ldtr);
53 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 break;
58
59 case IEMMODE_32BIT:
60 IEM_MC_BEGIN(0, 1);
61 IEM_MC_LOCAL(uint32_t, u32Ldtr);
62 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
64 IEM_MC_ADVANCE_RIP();
65 IEM_MC_END();
66 break;
67
68 case IEMMODE_64BIT:
69 IEM_MC_BEGIN(0, 1);
70 IEM_MC_LOCAL(uint64_t, u64Ldtr);
71 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
73 IEM_MC_ADVANCE_RIP();
74 IEM_MC_END();
75 break;
76
77 IEM_NOT_REACHED_DEFAULT_CASE_RET();
78 }
79 }
80 else
81 {
82 IEM_MC_BEGIN(0, 2);
83 IEM_MC_LOCAL(uint16_t, u16Ldtr);
84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
87 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
89 IEM_MC_ADVANCE_RIP();
90 IEM_MC_END();
91 }
92 return VINF_SUCCESS;
93}
94
95
96/** Opcode 0x0f 0x00 /1. */
97FNIEMOPRM_DEF(iemOp_Grp6_str)
98{
99 IEMOP_MNEMONIC(str, "str Rv/Mw");
100 IEMOP_HLP_MIN_286();
101 IEMOP_HLP_NO_REAL_OR_V86_MODE();
102
103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
104 {
105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
106 switch (pVCpu->iem.s.enmEffOpSize)
107 {
108 case IEMMODE_16BIT:
109 IEM_MC_BEGIN(0, 1);
110 IEM_MC_LOCAL(uint16_t, u16Tr);
111 IEM_MC_FETCH_TR_U16(u16Tr);
112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(0, 1);
119 IEM_MC_LOCAL(uint32_t, u32Tr);
120 IEM_MC_FETCH_TR_U32(u32Tr);
121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
122 IEM_MC_ADVANCE_RIP();
123 IEM_MC_END();
124 break;
125
126 case IEMMODE_64BIT:
127 IEM_MC_BEGIN(0, 1);
128 IEM_MC_LOCAL(uint64_t, u64Tr);
129 IEM_MC_FETCH_TR_U64(u64Tr);
130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 break;
134
135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
136 }
137 }
138 else
139 {
140 IEM_MC_BEGIN(0, 2);
141 IEM_MC_LOCAL(uint16_t, u16Tr);
142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
145 IEM_MC_FETCH_TR_U16(u16Tr);
146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 }
150 return VINF_SUCCESS;
151}
152
153
154/** Opcode 0x0f 0x00 /2. */
155FNIEMOPRM_DEF(iemOp_Grp6_lldt)
156{
157 IEMOP_MNEMONIC(lldt, "lldt Ew");
158 IEMOP_HLP_MIN_286();
159 IEMOP_HLP_NO_REAL_OR_V86_MODE();
160
161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
162 {
163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
164 IEM_MC_BEGIN(1, 0);
165 IEM_MC_ARG(uint16_t, u16Sel, 0);
166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
168 IEM_MC_END();
169 }
170 else
171 {
172 IEM_MC_BEGIN(1, 1);
173 IEM_MC_ARG(uint16_t, u16Sel, 0);
174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
180 IEM_MC_END();
181 }
182 return VINF_SUCCESS;
183}
184
185
186/** Opcode 0x0f 0x00 /3. */
187FNIEMOPRM_DEF(iemOp_Grp6_ltr)
188{
189 IEMOP_MNEMONIC(ltr, "ltr Ew");
190 IEMOP_HLP_MIN_286();
191 IEMOP_HLP_NO_REAL_OR_V86_MODE();
192
193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
194 {
195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
196 IEM_MC_BEGIN(1, 0);
197 IEM_MC_ARG(uint16_t, u16Sel, 0);
198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
200 IEM_MC_END();
201 }
202 else
203 {
204 IEM_MC_BEGIN(1, 1);
205 IEM_MC_ARG(uint16_t, u16Sel, 0);
206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
212 IEM_MC_END();
213 }
214 return VINF_SUCCESS;
215}
216
217
218/** Opcode 0x0f 0x00 /3. */
219FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
220{
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_NO_REAL_OR_V86_MODE();
223
224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
225 {
226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
227 IEM_MC_BEGIN(2, 0);
228 IEM_MC_ARG(uint16_t, u16Sel, 0);
229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(2, 1);
237 IEM_MC_ARG(uint16_t, u16Sel, 0);
238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
244 IEM_MC_END();
245 }
246 return VINF_SUCCESS;
247}
248
249
250/** Opcode 0x0f 0x00 /4. */
251FNIEMOPRM_DEF(iemOp_Grp6_verr)
252{
253 IEMOP_MNEMONIC(verr, "verr Ew");
254 IEMOP_HLP_MIN_286();
255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
256}
257
258
259/** Opcode 0x0f 0x00 /5. */
260FNIEMOPRM_DEF(iemOp_Grp6_verw)
261{
262 IEMOP_MNEMONIC(verw, "verw Ew");
263 IEMOP_HLP_MIN_286();
264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
265}
266
267
268/**
269 * Group 6 jump table.
270 */
271IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
272{
273 iemOp_Grp6_sldt,
274 iemOp_Grp6_str,
275 iemOp_Grp6_lldt,
276 iemOp_Grp6_ltr,
277 iemOp_Grp6_verr,
278 iemOp_Grp6_verw,
279 iemOp_InvalidWithRM,
280 iemOp_InvalidWithRM
281};
282
283/** Opcode 0x0f 0x00. */
284FNIEMOP_DEF(iemOp_Grp6)
285{
286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
288}
289
290
291/** Opcode 0x0f 0x01 /0. */
292FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
293{
294 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
295 IEMOP_HLP_MIN_286();
296 IEMOP_HLP_64BIT_OP_SIZE();
297 IEM_MC_BEGIN(2, 1);
298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
304 IEM_MC_END();
305 return VINF_SUCCESS;
306}
307
308
309/** Opcode 0x0f 0x01 /0. */
310FNIEMOP_DEF(iemOp_Grp7_vmcall)
311{
312 IEMOP_BITCH_ABOUT_STUB();
313 return IEMOP_RAISE_INVALID_OPCODE();
314}
315
316
317/** Opcode 0x0f 0x01 /0. */
318FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
319{
320 IEMOP_BITCH_ABOUT_STUB();
321 return IEMOP_RAISE_INVALID_OPCODE();
322}
323
324
325/** Opcode 0x0f 0x01 /0. */
326FNIEMOP_DEF(iemOp_Grp7_vmresume)
327{
328 IEMOP_BITCH_ABOUT_STUB();
329 return IEMOP_RAISE_INVALID_OPCODE();
330}
331
332
333/** Opcode 0x0f 0x01 /0. */
334FNIEMOP_DEF(iemOp_Grp7_vmxoff)
335{
336 IEMOP_BITCH_ABOUT_STUB();
337 return IEMOP_RAISE_INVALID_OPCODE();
338}
339
340
341/** Opcode 0x0f 0x01 /1. */
342FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
343{
344 IEMOP_MNEMONIC(sidt, "sidt Ms");
345 IEMOP_HLP_MIN_286();
346 IEMOP_HLP_64BIT_OP_SIZE();
347 IEM_MC_BEGIN(2, 1);
348 IEM_MC_ARG(uint8_t, iEffSeg, 0);
349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
354 IEM_MC_END();
355 return VINF_SUCCESS;
356}
357
358
359/** Opcode 0x0f 0x01 /1. */
360FNIEMOP_DEF(iemOp_Grp7_monitor)
361{
362 IEMOP_MNEMONIC(monitor, "monitor");
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
365}
366
367
368/** Opcode 0x0f 0x01 /1. */
369FNIEMOP_DEF(iemOp_Grp7_mwait)
370{
371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
374}
375
376
377/** Opcode 0x0f 0x01 /2. */
378FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
379{
380 IEMOP_MNEMONIC(lgdt, "lgdt");
381 IEMOP_HLP_64BIT_OP_SIZE();
382 IEM_MC_BEGIN(3, 1);
383 IEM_MC_ARG(uint8_t, iEffSeg, 0);
384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
390 IEM_MC_END();
391 return VINF_SUCCESS;
392}
393
394
395/** Opcode 0x0f 0x01 0xd0. */
396FNIEMOP_DEF(iemOp_Grp7_xgetbv)
397{
398 IEMOP_MNEMONIC(xgetbv, "xgetbv");
399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
400 {
401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
403 }
404 return IEMOP_RAISE_INVALID_OPCODE();
405}
406
407
408/** Opcode 0x0f 0x01 0xd1. */
409FNIEMOP_DEF(iemOp_Grp7_xsetbv)
410{
411 IEMOP_MNEMONIC(xsetbv, "xsetbv");
412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
413 {
414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
416 }
417 return IEMOP_RAISE_INVALID_OPCODE();
418}
419
420
421/** Opcode 0x0f 0x01 /3. */
422FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
423{
424 IEMOP_MNEMONIC(lidt, "lidt");
425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
426 ? IEMMODE_64BIT
427 : pVCpu->iem.s.enmEffOpSize;
428 IEM_MC_BEGIN(3, 1);
429 IEM_MC_ARG(uint8_t, iEffSeg, 0);
430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
436 IEM_MC_END();
437 return VINF_SUCCESS;
438}
439
440
441#ifdef VBOX_WITH_NESTED_HWVIRT
442/** Opcode 0x0f 0x01 0xd8. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
444{
445 IEMOP_MNEMONIC(vmrun, "vmrun");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
447}
448
449/** Opcode 0x0f 0x01 0xd9. */
450FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
451{
452 IEMOP_MNEMONIC(vmmcall, "vmmcall");
453 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
454}
455
456
457/** Opcode 0x0f 0x01 0xda. */
458FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
459{
460 IEMOP_MNEMONIC(vmload, "vmload");
461 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
462}
463
464
465/** Opcode 0x0f 0x01 0xdb. */
466FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
467{
468 IEMOP_MNEMONIC(vmsave, "vmsave");
469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
470}
471
472
473/** Opcode 0x0f 0x01 0xdc. */
474FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
475{
476 IEMOP_MNEMONIC(stgi, "stgi");
477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
478}
479
480
481/** Opcode 0x0f 0x01 0xdd. */
482FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
483{
484 IEMOP_MNEMONIC(clgi, "clgi");
485 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
486}
487
488
489/** Opcode 0x0f 0x01 0xdf. */
490FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
491{
492 IEMOP_MNEMONIC(invlpga, "invlpga");
493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
494}
495#else
496/** Opcode 0x0f 0x01 0xd8. */
497FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
498
499/** Opcode 0x0f 0x01 0xd9. */
500FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
501
502/** Opcode 0x0f 0x01 0xda. */
503FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
504
505/** Opcode 0x0f 0x01 0xdb. */
506FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
507
508/** Opcode 0x0f 0x01 0xdc. */
509FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
510
511/** Opcode 0x0f 0x01 0xdd. */
512FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
513
514/** Opcode 0x0f 0x01 0xdf. */
515FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
516#endif /* VBOX_WITH_NESTED_HWVIRT */
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520
521/** Opcode 0x0f 0x01 /4. */
522FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
523{
524 IEMOP_MNEMONIC(smsw, "smsw");
525 IEMOP_HLP_MIN_286();
526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
527 {
528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
529 switch (pVCpu->iem.s.enmEffOpSize)
530 {
531 case IEMMODE_16BIT:
532 IEM_MC_BEGIN(0, 1);
533 IEM_MC_LOCAL(uint16_t, u16Tmp);
534 IEM_MC_FETCH_CR0_U16(u16Tmp);
535 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
536 { /* likely */ }
537 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
538 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
539 else
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
541 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
542 IEM_MC_ADVANCE_RIP();
543 IEM_MC_END();
544 return VINF_SUCCESS;
545
546 case IEMMODE_32BIT:
547 IEM_MC_BEGIN(0, 1);
548 IEM_MC_LOCAL(uint32_t, u32Tmp);
549 IEM_MC_FETCH_CR0_U32(u32Tmp);
550 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
551 IEM_MC_ADVANCE_RIP();
552 IEM_MC_END();
553 return VINF_SUCCESS;
554
555 case IEMMODE_64BIT:
556 IEM_MC_BEGIN(0, 1);
557 IEM_MC_LOCAL(uint64_t, u64Tmp);
558 IEM_MC_FETCH_CR0_U64(u64Tmp);
559 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
560 IEM_MC_ADVANCE_RIP();
561 IEM_MC_END();
562 return VINF_SUCCESS;
563
564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
565 }
566 }
567 else
568 {
569 /* Ignore operand size here, memory refs are always 16-bit. */
570 IEM_MC_BEGIN(0, 2);
571 IEM_MC_LOCAL(uint16_t, u16Tmp);
572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
575 IEM_MC_FETCH_CR0_U16(u16Tmp);
576 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
577 { /* likely */ }
578 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
579 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
580 else
581 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
582 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
583 IEM_MC_ADVANCE_RIP();
584 IEM_MC_END();
585 return VINF_SUCCESS;
586 }
587}
588
589
590/** Opcode 0x0f 0x01 /6. */
591FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
592{
593 /* The operand size is effectively ignored, all is 16-bit and only the
594 lower 3-bits are used. */
595 IEMOP_MNEMONIC(lmsw, "lmsw");
596 IEMOP_HLP_MIN_286();
597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
598 {
599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
600 IEM_MC_BEGIN(1, 0);
601 IEM_MC_ARG(uint16_t, u16Tmp, 0);
602 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
603 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
604 IEM_MC_END();
605 }
606 else
607 {
608 IEM_MC_BEGIN(1, 1);
609 IEM_MC_ARG(uint16_t, u16Tmp, 0);
610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
613 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
614 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
615 IEM_MC_END();
616 }
617 return VINF_SUCCESS;
618}
619
620
621/** Opcode 0x0f 0x01 /7. */
622FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
623{
624 IEMOP_MNEMONIC(invlpg, "invlpg");
625 IEMOP_HLP_MIN_486();
626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
627 IEM_MC_BEGIN(1, 1);
628 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
630 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
631 IEM_MC_END();
632 return VINF_SUCCESS;
633}
634
635
636/** Opcode 0x0f 0x01 /7. */
637FNIEMOP_DEF(iemOp_Grp7_swapgs)
638{
639 IEMOP_MNEMONIC(swapgs, "swapgs");
640 IEMOP_HLP_ONLY_64BIT();
641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
642 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
643}
644
645
646/** Opcode 0x0f 0x01 /7. */
647FNIEMOP_DEF(iemOp_Grp7_rdtscp)
648{
649 NOREF(pVCpu);
650 IEMOP_BITCH_ABOUT_STUB();
651 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
652}
653
654
655/**
656 * Group 7 jump table, memory variant.
657 */
658IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
659{
660 iemOp_Grp7_sgdt,
661 iemOp_Grp7_sidt,
662 iemOp_Grp7_lgdt,
663 iemOp_Grp7_lidt,
664 iemOp_Grp7_smsw,
665 iemOp_InvalidWithRM,
666 iemOp_Grp7_lmsw,
667 iemOp_Grp7_invlpg
668};
669
670
671/** Opcode 0x0f 0x01. */
672FNIEMOP_DEF(iemOp_Grp7)
673{
674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
675 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
676 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
677
678 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
679 {
680 case 0:
681 switch (bRm & X86_MODRM_RM_MASK)
682 {
683 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
684 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
685 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
686 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
687 }
688 return IEMOP_RAISE_INVALID_OPCODE();
689
690 case 1:
691 switch (bRm & X86_MODRM_RM_MASK)
692 {
693 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
694 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
695 }
696 return IEMOP_RAISE_INVALID_OPCODE();
697
698 case 2:
699 switch (bRm & X86_MODRM_RM_MASK)
700 {
701 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
702 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
703 }
704 return IEMOP_RAISE_INVALID_OPCODE();
705
706 case 3:
707 switch (bRm & X86_MODRM_RM_MASK)
708 {
709 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
710 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
711 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
712 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
713 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
714 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
715 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
716 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
718 }
719
720 case 4:
721 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
722
723 case 5:
724 return IEMOP_RAISE_INVALID_OPCODE();
725
726 case 6:
727 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
728
729 case 7:
730 switch (bRm & X86_MODRM_RM_MASK)
731 {
732 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
733 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
734 }
735 return IEMOP_RAISE_INVALID_OPCODE();
736
737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
738 }
739}
740
741/** Opcode 0x0f 0x00 /3. */
742FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
743{
744 IEMOP_HLP_NO_REAL_OR_V86_MODE();
745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
746
747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
748 {
749 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
750 switch (pVCpu->iem.s.enmEffOpSize)
751 {
752 case IEMMODE_16BIT:
753 {
754 IEM_MC_BEGIN(3, 0);
755 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
756 IEM_MC_ARG(uint16_t, u16Sel, 1);
757 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
758
759 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
760 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
761 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
762
763 IEM_MC_END();
764 return VINF_SUCCESS;
765 }
766
767 case IEMMODE_32BIT:
768 case IEMMODE_64BIT:
769 {
770 IEM_MC_BEGIN(3, 0);
771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
772 IEM_MC_ARG(uint16_t, u16Sel, 1);
773 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
774
775 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
776 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
777 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
778
779 IEM_MC_END();
780 return VINF_SUCCESS;
781 }
782
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785 }
786 else
787 {
788 switch (pVCpu->iem.s.enmEffOpSize)
789 {
790 case IEMMODE_16BIT:
791 {
792 IEM_MC_BEGIN(3, 1);
793 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
794 IEM_MC_ARG(uint16_t, u16Sel, 1);
795 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797
798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
799 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
800
801 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
802 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
804
805 IEM_MC_END();
806 return VINF_SUCCESS;
807 }
808
809 case IEMMODE_32BIT:
810 case IEMMODE_64BIT:
811 {
812 IEM_MC_BEGIN(3, 1);
813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
814 IEM_MC_ARG(uint16_t, u16Sel, 1);
815 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
817
818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
819 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
820/** @todo testcase: make sure it's a 16-bit read. */
821
822 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
823 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
824 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
825
826 IEM_MC_END();
827 return VINF_SUCCESS;
828 }
829
830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
831 }
832 }
833}
834
835
836
837/** Opcode 0x0f 0x02. */
838FNIEMOP_DEF(iemOp_lar_Gv_Ew)
839{
840 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
842}
843
844
845/** Opcode 0x0f 0x03. */
846FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
847{
848 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
849 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
850}
851
852
853/** Opcode 0x0f 0x05. */
854FNIEMOP_DEF(iemOp_syscall)
855{
856 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
859}
860
861
862/** Opcode 0x0f 0x06. */
863FNIEMOP_DEF(iemOp_clts)
864{
865 IEMOP_MNEMONIC(clts, "clts");
866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
867 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
868}
869
870
871/** Opcode 0x0f 0x07. */
872FNIEMOP_DEF(iemOp_sysret)
873{
874 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
876 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
877}
878
879
880/** Opcode 0x0f 0x08. */
881FNIEMOP_STUB(iemOp_invd);
882// IEMOP_HLP_MIN_486();
883
884
885/** Opcode 0x0f 0x09. */
886FNIEMOP_DEF(iemOp_wbinvd)
887{
888 IEMOP_MNEMONIC(wbinvd, "wbinvd");
889 IEMOP_HLP_MIN_486();
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
891 IEM_MC_BEGIN(0, 0);
892 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
893 IEM_MC_ADVANCE_RIP();
894 IEM_MC_END();
895 return VINF_SUCCESS; /* ignore for now */
896}
897
898
899/** Opcode 0x0f 0x0b. */
900FNIEMOP_DEF(iemOp_ud2)
901{
902 IEMOP_MNEMONIC(ud2, "ud2");
903 return IEMOP_RAISE_INVALID_OPCODE();
904}
905
906/** Opcode 0x0f 0x0d. */
907FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
908{
909 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
911 {
912 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
913 return IEMOP_RAISE_INVALID_OPCODE();
914 }
915
916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
918 {
919 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
920 return IEMOP_RAISE_INVALID_OPCODE();
921 }
922
923 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
924 {
925 case 2: /* Aliased to /0 for the time being. */
926 case 4: /* Aliased to /0 for the time being. */
927 case 5: /* Aliased to /0 for the time being. */
928 case 6: /* Aliased to /0 for the time being. */
929 case 7: /* Aliased to /0 for the time being. */
930 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
931 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
932 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
934 }
935
936 IEM_MC_BEGIN(0, 1);
937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
940 /* Currently a NOP. */
941 NOREF(GCPtrEffSrc);
942 IEM_MC_ADVANCE_RIP();
943 IEM_MC_END();
944 return VINF_SUCCESS;
945}
946
947
948/** Opcode 0x0f 0x0e. */
949FNIEMOP_STUB(iemOp_femms);
950
951
952/** Opcode 0x0f 0x0f 0x0c. */
953FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
954
955/** Opcode 0x0f 0x0f 0x0d. */
956FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
957
958/** Opcode 0x0f 0x0f 0x1c. */
959FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
960
961/** Opcode 0x0f 0x0f 0x1d. */
962FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
963
964/** Opcode 0x0f 0x0f 0x8a. */
965FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
966
967/** Opcode 0x0f 0x0f 0x8e. */
968FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
969
970/** Opcode 0x0f 0x0f 0x90. */
971FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
972
973/** Opcode 0x0f 0x0f 0x94. */
974FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
975
976/** Opcode 0x0f 0x0f 0x96. */
977FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
978
979/** Opcode 0x0f 0x0f 0x97. */
980FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
981
982/** Opcode 0x0f 0x0f 0x9a. */
983FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
984
985/** Opcode 0x0f 0x0f 0x9e. */
986FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
987
988/** Opcode 0x0f 0x0f 0xa0. */
989FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
990
991/** Opcode 0x0f 0x0f 0xa4. */
992FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
993
994/** Opcode 0x0f 0x0f 0xa6. */
995FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
996
997/** Opcode 0x0f 0x0f 0xa7. */
998FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
999
1000/** Opcode 0x0f 0x0f 0xaa. */
1001FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1002
1003/** Opcode 0x0f 0x0f 0xae. */
1004FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1005
1006/** Opcode 0x0f 0x0f 0xb0. */
1007FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1008
1009/** Opcode 0x0f 0x0f 0xb4. */
1010FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1011
1012/** Opcode 0x0f 0x0f 0xb6. */
1013FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1014
1015/** Opcode 0x0f 0x0f 0xb7. */
1016FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1017
1018/** Opcode 0x0f 0x0f 0xbb. */
1019FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1020
1021/** Opcode 0x0f 0x0f 0xbf. */
1022FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1023
1024
1025/** Opcode 0x0f 0x0f. */
1026FNIEMOP_DEF(iemOp_3Dnow)
1027{
1028 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1029 {
1030 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1031 return IEMOP_RAISE_INVALID_OPCODE();
1032 }
1033
1034 /* This is pretty sparse, use switch instead of table. */
1035 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1036 switch (b)
1037 {
1038 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1039 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1040 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1041 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1042 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1043 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1044 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1045 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1046 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1047 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1048 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1049 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1050 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1051 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1052 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1053 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1054 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1055 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1056 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1057 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1058 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1059 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1060 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1061 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1062 default:
1063 return IEMOP_RAISE_INVALID_OPCODE();
1064 }
1065}
1066
1067
1068/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1069FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1070/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1071FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1072/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1073FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1074/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1075FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1076
1077
1078/**
1079 * @opcode 0x11
1080 * @oppfx none
1081 * @opcpuid sse
1082 * @opgroup og_sse_simdfp_datamove
1083 * @opxcpttype 4UA
1084 * @optest op1=1 op2=2 -> op1=2
1085 * @optest op1=0 op2=-42 -> op1=-42
1086 */
1087FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1088{
1089 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1092 {
1093 /*
1094 * Register, register.
1095 */
1096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1097 IEM_MC_BEGIN(0, 0);
1098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1100 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1101 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1102 IEM_MC_ADVANCE_RIP();
1103 IEM_MC_END();
1104 }
1105 else
1106 {
1107 /*
1108 * Memory, register.
1109 */
1110 IEM_MC_BEGIN(0, 2);
1111 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1113
1114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1117 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1118
1119 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1120 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 return VINF_SUCCESS;
1126}
1127
1128
1129/**
1130 * @opcode 0x11
1131 * @oppfx 0x66
1132 * @opcpuid sse2
1133 * @opgroup og_sse2_pcksclr_datamove
1134 * @opxcpttype 4UA
1135 * @optest op1=1 op2=2 -> op1=2
1136 * @optest op1=0 op2=-42 -> op1=-42
1137 */
1138FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1139{
1140 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1143 {
1144 /*
1145 * Register, register.
1146 */
1147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1148 IEM_MC_BEGIN(0, 0);
1149 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1151 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1152 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1153 IEM_MC_ADVANCE_RIP();
1154 IEM_MC_END();
1155 }
1156 else
1157 {
1158 /*
1159 * Memory, register.
1160 */
1161 IEM_MC_BEGIN(0, 2);
1162 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1164
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1169
1170 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1171 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1172
1173 IEM_MC_ADVANCE_RIP();
1174 IEM_MC_END();
1175 }
1176 return VINF_SUCCESS;
1177}
1178
1179
1180/**
1181 * @opcode 0x11
1182 * @oppfx 0xf3
1183 * @opcpuid sse
1184 * @opgroup og_sse_simdfp_datamove
1185 * @opxcpttype 5
1186 * @optest op1=1 op2=2 -> op1=2
1187 * @optest op1=0 op2=-22 -> op1=-22
1188 */
1189FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
1190{
1191 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1194 {
1195 /*
1196 * Register, register.
1197 */
1198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1199 IEM_MC_BEGIN(0, 1);
1200 IEM_MC_LOCAL(uint32_t, uSrc);
1201
1202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1204 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1205 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1206
1207 IEM_MC_ADVANCE_RIP();
1208 IEM_MC_END();
1209 }
1210 else
1211 {
1212 /*
1213 * Memory, register.
1214 */
1215 IEM_MC_BEGIN(0, 2);
1216 IEM_MC_LOCAL(uint32_t, uSrc);
1217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1218
1219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1221 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1222 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1223
1224 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1225 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1226
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 return VINF_SUCCESS;
1231}
1232
1233
1234/**
1235 * @opcode 0x11
1236 * @oppfx 0xf2
1237 * @opcpuid sse2
1238 * @opgroup og_sse2_pcksclr_datamove
1239 * @opxcpttype 5
1240 * @optest op1=1 op2=2 -> op1=2
1241 * @optest op1=0 op2=-42 -> op1=-42
1242 */
1243FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1244{
1245 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1248 {
1249 /*
1250 * Register, register.
1251 */
1252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1253 IEM_MC_BEGIN(0, 1);
1254 IEM_MC_LOCAL(uint64_t, uSrc);
1255
1256 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1257 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1258 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1259 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1260
1261 IEM_MC_ADVANCE_RIP();
1262 IEM_MC_END();
1263 }
1264 else
1265 {
1266 /*
1267 * Memory, register.
1268 */
1269 IEM_MC_BEGIN(0, 2);
1270 IEM_MC_LOCAL(uint64_t, uSrc);
1271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1272
1273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1277
1278 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1279 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1280
1281 IEM_MC_ADVANCE_RIP();
1282 IEM_MC_END();
1283 }
1284 return VINF_SUCCESS;
1285}
1286
1287
1288FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1289{
1290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1292 {
1293 /**
1294 * @opcode 0x12
1295 * @opcodesub 11 mr/reg
1296 * @oppfx none
1297 * @opcpuid sse
1298 * @opgroup og_sse_simdfp_datamove
1299 * @opxcpttype 5
1300 * @optest op1=1 op2=2 -> op1=2
1301 * @optest op1=0 op2=-42 -> op1=-42
1302 */
1303 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1304
1305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1306 IEM_MC_BEGIN(0, 1);
1307 IEM_MC_LOCAL(uint64_t, uSrc);
1308
1309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1310 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1311 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1312 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1313
1314 IEM_MC_ADVANCE_RIP();
1315 IEM_MC_END();
1316 }
1317 else
1318 {
1319 /**
1320 * @opdone
1321 * @opcode 0x12
1322 * @opcodesub !11 mr/reg
1323 * @oppfx none
1324 * @opcpuid sse
1325 * @opgroup og_sse_simdfp_datamove
1326 * @opxcpttype 5
1327 * @optest op1=1 op2=2 -> op1=2
1328 * @optest op1=0 op2=-42 -> op1=-42
1329 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1330 */
1331 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1332
1333 IEM_MC_BEGIN(0, 2);
1334 IEM_MC_LOCAL(uint64_t, uSrc);
1335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1336
1337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1341
1342 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1343 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1344
1345 IEM_MC_ADVANCE_RIP();
1346 IEM_MC_END();
1347 }
1348 return VINF_SUCCESS;
1349}
1350
1351
1352/**
1353 * @opcode 0x12
1354 * @opcodesub !11 mr/reg
1355 * @oppfx 0x66
1356 * @opcpuid sse2
1357 * @opgroup og_sse2_pcksclr_datamove
1358 * @opxcpttype 5
1359 * @optest op1=1 op2=2 -> op1=2
1360 * @optest op1=0 op2=-42 -> op1=-42
1361 */
1362FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1363{
1364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1365 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1366 {
1367 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1368
1369 IEM_MC_BEGIN(0, 2);
1370 IEM_MC_LOCAL(uint64_t, uSrc);
1371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1372
1373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1375 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1376 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1377
1378 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1379 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1380
1381 IEM_MC_ADVANCE_RIP();
1382 IEM_MC_END();
1383 return VINF_SUCCESS;
1384 }
1385
1386 /**
1387 * @opdone
1388 * @opmnemonic ud660f12m3
1389 * @opcode 0x12
1390 * @opcodesub 11 mr/reg
1391 * @oppfx 0x66
1392 * @opunused immediate
1393 * @opcpuid sse
1394 * @optest ->
1395 */
1396 return IEMOP_RAISE_INVALID_OPCODE();
1397}
1398
1399
1400/**
1401 * @opcode 0x12
1402 * @oppfx 0xf3
1403 * @opcpuid sse3
1404 * @opgroup og_sse3_pcksclr_datamove
1405 * @opxcpttype 4
1406 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1407 * op1=0x00000002000000020000000100000001
1408 * @oponlytest
1409 */
1410FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1411{
1412 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1414 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1415 {
1416 /*
1417 * Register, register.
1418 */
1419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1420 IEM_MC_BEGIN(2, 0);
1421 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1422 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1423
1424 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1425 IEM_MC_PREPARE_SSE_USAGE();
1426
1427 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1428 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1429 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1430
1431 IEM_MC_ADVANCE_RIP();
1432 IEM_MC_END();
1433 }
1434 else
1435 {
1436 /*
1437 * Register, memory.
1438 */
1439 IEM_MC_BEGIN(2, 2);
1440 IEM_MC_LOCAL(RTUINT128U, uSrc);
1441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1442 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1443 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1444
1445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1447 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1448 IEM_MC_PREPARE_SSE_USAGE();
1449
1450 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1451 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1452 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1453
1454 IEM_MC_ADVANCE_RIP();
1455 IEM_MC_END();
1456 }
1457 return VINF_SUCCESS;
1458
1459}
1460
1461/** Opcode 0xf2 0x0f 0x12. */
1462FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1463
1464/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1465FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1466
1467/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1468FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1469{
1470 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1473 {
1474#if 0
1475 /*
1476 * Register, register.
1477 */
1478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1479 IEM_MC_BEGIN(0, 1);
1480 IEM_MC_LOCAL(uint64_t, uSrc);
1481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1483 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1484 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1485 IEM_MC_ADVANCE_RIP();
1486 IEM_MC_END();
1487#else
1488 return IEMOP_RAISE_INVALID_OPCODE();
1489#endif
1490 }
1491 else
1492 {
1493 /*
1494 * Memory, register.
1495 */
1496 IEM_MC_BEGIN(0, 2);
1497 IEM_MC_LOCAL(uint64_t, uSrc);
1498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1499
1500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1504
1505 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1506 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1507
1508 IEM_MC_ADVANCE_RIP();
1509 IEM_MC_END();
1510 }
1511 return VINF_SUCCESS;
1512}
1513
1514/* Opcode 0xf3 0x0f 0x13 - invalid */
1515/* Opcode 0xf2 0x0f 0x13 - invalid */
1516
1517/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1518FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1519/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1520FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1521/* Opcode 0xf3 0x0f 0x14 - invalid */
1522/* Opcode 0xf2 0x0f 0x14 - invalid */
1523/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1524FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1525/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1526FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1527/* Opcode 0xf3 0x0f 0x15 - invalid */
1528/* Opcode 0xf2 0x0f 0x15 - invalid */
1529/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1530FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1531/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1532FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1533/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1534FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1535/* Opcode 0xf2 0x0f 0x16 - invalid */
1536/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1537FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1538/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1539FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1540/* Opcode 0xf3 0x0f 0x17 - invalid */
1541/* Opcode 0xf2 0x0f 0x17 - invalid */
1542
1543
1544/** Opcode 0x0f 0x18. */
1545FNIEMOP_DEF(iemOp_prefetch_Grp16)
1546{
1547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1548 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1549 {
1550 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1551 {
1552 case 4: /* Aliased to /0 for the time being according to AMD. */
1553 case 5: /* Aliased to /0 for the time being according to AMD. */
1554 case 6: /* Aliased to /0 for the time being according to AMD. */
1555 case 7: /* Aliased to /0 for the time being according to AMD. */
1556 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1557 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1558 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1559 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1561 }
1562
1563 IEM_MC_BEGIN(0, 1);
1564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1567 /* Currently a NOP. */
1568 NOREF(GCPtrEffSrc);
1569 IEM_MC_ADVANCE_RIP();
1570 IEM_MC_END();
1571 return VINF_SUCCESS;
1572 }
1573
1574 return IEMOP_RAISE_INVALID_OPCODE();
1575}
1576
1577
1578/** Opcode 0x0f 0x19..0x1f. */
1579FNIEMOP_DEF(iemOp_nop_Ev)
1580{
1581 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1584 {
1585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1586 IEM_MC_BEGIN(0, 0);
1587 IEM_MC_ADVANCE_RIP();
1588 IEM_MC_END();
1589 }
1590 else
1591 {
1592 IEM_MC_BEGIN(0, 1);
1593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1596 /* Currently a NOP. */
1597 NOREF(GCPtrEffSrc);
1598 IEM_MC_ADVANCE_RIP();
1599 IEM_MC_END();
1600 }
1601 return VINF_SUCCESS;
1602}
1603
1604
1605/** Opcode 0x0f 0x20. */
1606FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1607{
1608 /* mod is ignored, as is operand size overrides. */
1609 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1610 IEMOP_HLP_MIN_386();
1611 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1612 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1613 else
1614 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1615
1616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1617 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1618 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1619 {
1620 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1621 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1622 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1623 iCrReg |= 8;
1624 }
1625 switch (iCrReg)
1626 {
1627 case 0: case 2: case 3: case 4: case 8:
1628 break;
1629 default:
1630 return IEMOP_RAISE_INVALID_OPCODE();
1631 }
1632 IEMOP_HLP_DONE_DECODING();
1633
1634 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1635}
1636
1637
1638/** Opcode 0x0f 0x21. */
1639FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1640{
1641 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1642 IEMOP_HLP_MIN_386();
1643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1645 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1646 return IEMOP_RAISE_INVALID_OPCODE();
1647 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1648 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1649 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1650}
1651
1652
1653/** Opcode 0x0f 0x22. */
1654FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1655{
1656 /* mod is ignored, as is operand size overrides. */
1657 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1658 IEMOP_HLP_MIN_386();
1659 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1660 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1661 else
1662 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1663
1664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1665 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1666 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1667 {
1668 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1669 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1670 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1671 iCrReg |= 8;
1672 }
1673 switch (iCrReg)
1674 {
1675 case 0: case 2: case 3: case 4: case 8:
1676 break;
1677 default:
1678 return IEMOP_RAISE_INVALID_OPCODE();
1679 }
1680 IEMOP_HLP_DONE_DECODING();
1681
1682 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1683}
1684
1685
1686/** Opcode 0x0f 0x23. */
1687FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1688{
1689 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1690 IEMOP_HLP_MIN_386();
1691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1693 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1694 return IEMOP_RAISE_INVALID_OPCODE();
1695 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1696 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1697 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1698}
1699
1700
1701/** Opcode 0x0f 0x24. */
1702FNIEMOP_DEF(iemOp_mov_Rd_Td)
1703{
1704 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1705 /** @todo works on 386 and 486. */
1706 /* The RM byte is not considered, see testcase. */
1707 return IEMOP_RAISE_INVALID_OPCODE();
1708}
1709
1710
1711/** Opcode 0x0f 0x26. */
1712FNIEMOP_DEF(iemOp_mov_Td_Rd)
1713{
1714 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1715 /** @todo works on 386 and 486. */
1716 /* The RM byte is not considered, see testcase. */
1717 return IEMOP_RAISE_INVALID_OPCODE();
1718}
1719
1720
1721/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1722FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1723{
1724 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1727 {
1728 /*
1729 * Register, register.
1730 */
1731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1732 IEM_MC_BEGIN(0, 0);
1733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1735 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1736 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1737 IEM_MC_ADVANCE_RIP();
1738 IEM_MC_END();
1739 }
1740 else
1741 {
1742 /*
1743 * Register, memory.
1744 */
1745 IEM_MC_BEGIN(0, 2);
1746 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1748
1749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1751 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1753
1754 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1755 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1756
1757 IEM_MC_ADVANCE_RIP();
1758 IEM_MC_END();
1759 }
1760 return VINF_SUCCESS;
1761}
1762
1763/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1764FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1765{
1766 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1769 {
1770 /*
1771 * Register, register.
1772 */
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEM_MC_BEGIN(0, 0);
1775 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1777 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1778 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1779 IEM_MC_ADVANCE_RIP();
1780 IEM_MC_END();
1781 }
1782 else
1783 {
1784 /*
1785 * Register, memory.
1786 */
1787 IEM_MC_BEGIN(0, 2);
1788 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1790
1791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1793 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1794 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1795
1796 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1797 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1798
1799 IEM_MC_ADVANCE_RIP();
1800 IEM_MC_END();
1801 }
1802 return VINF_SUCCESS;
1803}
1804
1805/* Opcode 0xf3 0x0f 0x28 - invalid */
1806/* Opcode 0xf2 0x0f 0x28 - invalid */
1807
1808/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1809FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1810{
1811 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1813 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1814 {
1815 /*
1816 * Register, register.
1817 */
1818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1819 IEM_MC_BEGIN(0, 0);
1820 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1821 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1822 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1823 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1824 IEM_MC_ADVANCE_RIP();
1825 IEM_MC_END();
1826 }
1827 else
1828 {
1829 /*
1830 * Memory, register.
1831 */
1832 IEM_MC_BEGIN(0, 2);
1833 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1835
1836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1838 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1839 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1840
1841 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1842 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1843
1844 IEM_MC_ADVANCE_RIP();
1845 IEM_MC_END();
1846 }
1847 return VINF_SUCCESS;
1848}
1849
1850/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1851FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1852{
1853 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1856 {
1857 /*
1858 * Register, register.
1859 */
1860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1861 IEM_MC_BEGIN(0, 0);
1862 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1863 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1864 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1865 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1866 IEM_MC_ADVANCE_RIP();
1867 IEM_MC_END();
1868 }
1869 else
1870 {
1871 /*
1872 * Memory, register.
1873 */
1874 IEM_MC_BEGIN(0, 2);
1875 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1877
1878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1880 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1881 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1882
1883 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1884 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1885
1886 IEM_MC_ADVANCE_RIP();
1887 IEM_MC_END();
1888 }
1889 return VINF_SUCCESS;
1890}
1891
1892/* Opcode 0xf3 0x0f 0x29 - invalid */
1893/* Opcode 0xf2 0x0f 0x29 - invalid */
1894
1895
1896/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1897FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1898/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1899FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1900/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
1901FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
1902/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1903FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
1904
1905
1906/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
1907FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1908{
1909 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1911 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1912 {
1913 /*
1914 * memory, register.
1915 */
1916 IEM_MC_BEGIN(0, 2);
1917 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1919
1920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1923 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1924
1925 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1926 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1927
1928 IEM_MC_ADVANCE_RIP();
1929 IEM_MC_END();
1930 }
1931 /* The register, register encoding is invalid. */
1932 else
1933 return IEMOP_RAISE_INVALID_OPCODE();
1934 return VINF_SUCCESS;
1935}
1936
1937/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1938FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1939{
1940 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1942 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1943 {
1944 /*
1945 * memory, register.
1946 */
1947 IEM_MC_BEGIN(0, 2);
1948 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1950
1951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1953 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1955
1956 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1957 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1958
1959 IEM_MC_ADVANCE_RIP();
1960 IEM_MC_END();
1961 }
1962 /* The register, register encoding is invalid. */
1963 else
1964 return IEMOP_RAISE_INVALID_OPCODE();
1965 return VINF_SUCCESS;
1966}
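
/**
 * On the movnt* stores above: architecturally these are ordinary 16-byte
 * aligned stores plus a non-temporal cache hint, and an interpreter may
 * ignore the hint; the part that must be emulated is the alignment rule
 * (#GP(0) on a misaligned operand), which IEM_MC_STORE_MEM_U128_ALIGN_SSE
 * provides.  A minimal standalone sketch of just that rule (helper name
 * made up):
 * @code
 *  #include <stdint.h>
 *  #include <string.h>
 *
 *  // Returns 0 on success, -1 where the CPU would raise #GP(0).
 *  static int SketchMovntStore(void *pvDst, uint8_t const abSrc[16])
 *  {
 *      if ((uintptr_t)pvDst & 15)
 *          return -1;              // misaligned: #GP(0)
 *      memcpy(pvDst, abSrc, 16);   // plain store; the NT hint is advisory
 *      return 0;
 *  }
 * @endcode
 */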
1967/* Opcode 0xf3 0x0f 0x2b - invalid */
1968/* Opcode 0xf2 0x0f 0x2b - invalid */
1969
1970
1971/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
1972FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
1973/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
1974FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
1975/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
1976FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1977/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
1978FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1979
1980/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
1981FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
1982/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
1983FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
1984/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
1985FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1986/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
1987FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1988
1989/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
1990FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
1991/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
1992FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
1993/* Opcode 0xf3 0x0f 0x2e - invalid */
1994/* Opcode 0xf2 0x0f 0x2e - invalid */
1995
1996/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
1997FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1998/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
1999FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2000/* Opcode 0xf3 0x0f 0x2f - invalid */
2001/* Opcode 0xf2 0x0f 0x2f - invalid */
2002
2003/** Opcode 0x0f 0x30. */
2004FNIEMOP_DEF(iemOp_wrmsr)
2005{
2006 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2008 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2009}
2010
2011
2012/** Opcode 0x0f 0x31. */
2013FNIEMOP_DEF(iemOp_rdtsc)
2014{
2015 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2017 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2018}
2019
2020
2021/** Opcode 0x0f 0x32. */
2022FNIEMOP_DEF(iemOp_rdmsr)
2023{
2024 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2026 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2027}
2028
2029
2030/** Opcode 0x0f 0x33. */
2031FNIEMOP_STUB(iemOp_rdpmc);
2032/** Opcode 0x0f 0x34. */
2033FNIEMOP_STUB(iemOp_sysenter);
2034/** Opcode 0x0f 0x35. */
2035FNIEMOP_STUB(iemOp_sysexit);
2036/** Opcode 0x0f 0x37. */
2037FNIEMOP_STUB(iemOp_getsec);
2038/** Opcode 0x0f 0x38. */
2039FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2040/** Opcode 0x0f 0x3a. */
2041FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2042
2043
2044/**
2045 * Implements a conditional move.
2046 *
2047 * Wish there was an obvious way to do this where we could share and reduce
2048 * code bloat.
2049 *
2050 * @param a_Cnd The conditional "microcode" operation.
2051 */
2052#define CMOV_X(a_Cnd) \
2053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2055 { \
2056 switch (pVCpu->iem.s.enmEffOpSize) \
2057 { \
2058 case IEMMODE_16BIT: \
2059 IEM_MC_BEGIN(0, 1); \
2060 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2061 a_Cnd { \
2062 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2063 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2064 } IEM_MC_ENDIF(); \
2065 IEM_MC_ADVANCE_RIP(); \
2066 IEM_MC_END(); \
2067 return VINF_SUCCESS; \
2068 \
2069 case IEMMODE_32BIT: \
2070 IEM_MC_BEGIN(0, 1); \
2071 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2072 a_Cnd { \
2073 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2074 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2075 } IEM_MC_ELSE() { \
2076 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2077 } IEM_MC_ENDIF(); \
2078 IEM_MC_ADVANCE_RIP(); \
2079 IEM_MC_END(); \
2080 return VINF_SUCCESS; \
2081 \
2082 case IEMMODE_64BIT: \
2083 IEM_MC_BEGIN(0, 1); \
2084 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2085 a_Cnd { \
2086 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2087 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2088 } IEM_MC_ENDIF(); \
2089 IEM_MC_ADVANCE_RIP(); \
2090 IEM_MC_END(); \
2091 return VINF_SUCCESS; \
2092 \
2093 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2094 } \
2095 } \
2096 else \
2097 { \
2098 switch (pVCpu->iem.s.enmEffOpSize) \
2099 { \
2100 case IEMMODE_16BIT: \
2101 IEM_MC_BEGIN(0, 2); \
2102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2103 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2105 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2106 a_Cnd { \
2107 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2108 } IEM_MC_ENDIF(); \
2109 IEM_MC_ADVANCE_RIP(); \
2110 IEM_MC_END(); \
2111 return VINF_SUCCESS; \
2112 \
2113 case IEMMODE_32BIT: \
2114 IEM_MC_BEGIN(0, 2); \
2115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2116 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2119 a_Cnd { \
2120 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2121 } IEM_MC_ELSE() { \
2122 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2123 } IEM_MC_ENDIF(); \
2124 IEM_MC_ADVANCE_RIP(); \
2125 IEM_MC_END(); \
2126 return VINF_SUCCESS; \
2127 \
2128 case IEMMODE_64BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2142 } \
2143 } do {} while (0)
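
/**
 * In plain C terms CMOV_X does the following: the source (and any memory
 * operand, which can still fault) is always fetched, only the register write
 * is conditional, and in 64-bit mode a 32-bit destination gets its high half
 * cleared even when the condition is false - that is what the IEM_MC_ELSE
 * branch with IEM_MC_CLEAR_HIGH_GREG_U64 is for.  A minimal standalone
 * sketch of the 32-bit case (helper name made up):
 * @code
 *  #include <stdint.h>
 *  #include <stdbool.h>
 *
 *  static uint64_t SketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCond)
 *  {
 *      uint32_t uResult = fCond ? uSrc : (uint32_t)uDst;
 *      return uResult;     // zero-extended: bits 63:32 are cleared either way
 *  }
 * @endcode
 */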
2144
2145
2146
2147/** Opcode 0x0f 0x40. */
2148FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2149{
2150 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2151 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2152}
2153
2154
2155/** Opcode 0x0f 0x41. */
2156FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2157{
2158 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2159 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2160}
2161
2162
2163/** Opcode 0x0f 0x42. */
2164FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2165{
2166 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2167 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2168}
2169
2170
2171/** Opcode 0x0f 0x43. */
2172FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2173{
2174 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2175 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2176}
2177
2178
2179/** Opcode 0x0f 0x44. */
2180FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2181{
2182 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2183 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2184}
2185
2186
2187/** Opcode 0x0f 0x45. */
2188FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2189{
2190 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2191 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2192}
2193
2194
2195/** Opcode 0x0f 0x46. */
2196FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2197{
2198 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2199 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2200}
2201
2202
2203/** Opcode 0x0f 0x47. */
2204FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2205{
2206 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2207 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2208}
2209
2210
2211/** Opcode 0x0f 0x48. */
2212FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2213{
2214 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2215 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2216}
2217
2218
2219/** Opcode 0x0f 0x49. */
2220FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2221{
2222 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2223 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2224}
2225
2226
2227/** Opcode 0x0f 0x4a. */
2228FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2229{
2230 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2231 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2232}
2233
2234
2235/** Opcode 0x0f 0x4b. */
2236FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2237{
2238 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2239 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2240}
2241
2242
2243/** Opcode 0x0f 0x4c. */
2244FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2245{
2246 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2247 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2248}
2249
2250
2251/** Opcode 0x0f 0x4d. */
2252FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2253{
2254 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2255 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2256}
2257
2258
2259/** Opcode 0x0f 0x4e. */
2260FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2261{
2262 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2263 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2264}
2265
2266
2267/** Opcode 0x0f 0x4f. */
2268FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2269{
2270 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2271 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2272}
2273
2274#undef CMOV_X
2275
2276/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2277FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2278/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2279FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2280/* Opcode 0xf3 0x0f 0x50 - invalid */
2281/* Opcode 0xf2 0x0f 0x50 - invalid */
2282
2283/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2284FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2285/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2286FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2287/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2288FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2289/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2290FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2291
2292/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2293FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2294/* Opcode 0x66 0x0f 0x52 - invalid */
2295/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2296FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2297/* Opcode 0xf2 0x0f 0x52 - invalid */
2298
2299/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2300FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2301/* Opcode 0x66 0x0f 0x53 - invalid */
2302/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2303FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2304/* Opcode 0xf2 0x0f 0x53 - invalid */
2305
2306/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2307FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2308/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2309FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2310/* Opcode 0xf3 0x0f 0x54 - invalid */
2311/* Opcode 0xf2 0x0f 0x54 - invalid */
2312
2313/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2314FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2315/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2316FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2317/* Opcode 0xf3 0x0f 0x55 - invalid */
2318/* Opcode 0xf2 0x0f 0x55 - invalid */
2319
2320/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2321FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2322/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2323FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2324/* Opcode 0xf3 0x0f 0x56 - invalid */
2325/* Opcode 0xf2 0x0f 0x56 - invalid */
2326
2327/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2328FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2329/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2330FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2331/* Opcode 0xf3 0x0f 0x57 - invalid */
2332/* Opcode 0xf2 0x0f 0x57 - invalid */
2333
2334/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2335FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2336/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2337FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2338/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2339FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2340/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2341FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2342
2343/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2344FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2345/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2346FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2347/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2348FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2349/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2350FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2351
2352/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2353FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2354/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2355FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2356/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2357FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2358/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2359FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2360
2361/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2362FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2363/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2364FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2365/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2366FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2367/* Opcode 0xf2 0x0f 0x5b - invalid */
2368
2369/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2370FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2371/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2372FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2373/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2374FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2375/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2376FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2377
2378/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2379FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2380/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2381FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2382/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2383FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2384/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2385FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2386
2387/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2388FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2389/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2390FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2391/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2392FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2393/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2394FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2395
2396/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2397FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2398/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2399FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2400/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2401FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2402/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2403FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2404
2405/**
2406 * Common worker for SSE2 instructions on the forms:
2407 *      pxxxx xmm1, xmm2/mem128
2408 *
2409 * The 2nd operand is the first half (low quadword) of a register, which in
2410 * the memory case means a 64-bit memory access that must be 128-bit
2411 * aligned.
2412 *
2413 * Exceptions type 4.
2414 */
2415FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2416{
2417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2419 {
2420 /*
2421 * Register, register.
2422 */
2423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2424 IEM_MC_BEGIN(2, 0);
2425 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2426 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2427 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2428 IEM_MC_PREPARE_SSE_USAGE();
2429 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2430 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2431 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2432 IEM_MC_ADVANCE_RIP();
2433 IEM_MC_END();
2434 }
2435 else
2436 {
2437 /*
2438 * Register, memory.
2439 */
2440 IEM_MC_BEGIN(2, 2);
2441 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2442 IEM_MC_LOCAL(uint64_t, uSrc);
2443 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2445
2446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2448 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2449 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2450
2451 IEM_MC_PREPARE_SSE_USAGE();
2452 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2453 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2454
2455 IEM_MC_ADVANCE_RIP();
2456 IEM_MC_END();
2457 }
2458 return VINF_SUCCESS;
2459}
2460
2461
2462/**
2463 * Common worker for MMX instructions on the forms:
2464 *      pxxxx mm1, mm2/mem32
2465 *
2466 * The 2nd operand is the first half (low doubleword) of a register, which
2467 * in the memory case means a 32-bit memory access.  Raises invalid opcode
2468 * if the implementation does not have an MMX variant (pfnU64 is NULL).
2469 *
2470 * Exceptions type 4.
2471 */
2472FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2473{
2474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2475 if (!pImpl->pfnU64)
2476 return IEMOP_RAISE_INVALID_OPCODE();
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /*
2480 * Register, register.
2481 */
2482 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2483 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2485 IEM_MC_BEGIN(2, 0);
2486 IEM_MC_ARG(uint64_t *, pDst, 0);
2487 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2488 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2489 IEM_MC_PREPARE_FPU_USAGE();
2490 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2491 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2492 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2493 IEM_MC_ADVANCE_RIP();
2494 IEM_MC_END();
2495 }
2496 else
2497 {
2498 /*
2499 * Register, memory.
2500 */
2501 IEM_MC_BEGIN(2, 2);
2502 IEM_MC_ARG(uint64_t *, pDst, 0);
2503 IEM_MC_LOCAL(uint32_t, uSrc);
2504 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2506
2507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2510 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2511
2512 IEM_MC_PREPARE_FPU_USAGE();
2513 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2514 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2515
2516 IEM_MC_ADVANCE_RIP();
2517 IEM_MC_END();
2518 }
2519 return VINF_SUCCESS;
2520}
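
/**
 * For reference, what the punpckl* family computes: the low elements of
 * destination and source are interleaved into a full-width result (the
 * punpckh* workers further down do the same with the high elements).  A
 * minimal standalone sketch of the 64-bit byte variant (helper name made
 * up):
 * @code
 *  #include <stdint.h>
 *
 *  static uint64_t SketchPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)    // the low four byte pairs
 *      {
 *          uResult |= (uint64_t)(uint8_t)(uDst >> i * 8) <<  i * 16;
 *          uResult |= (uint64_t)(uint8_t)(uSrc >> i * 8) << (i * 16 + 8);
 *      }
 *      return uResult;
 *  }
 * @endcode
 */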
2521
2522
2523/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2524FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2525{
2526 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2527 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2528}
2529
2530/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2531FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2532{
2533 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2534 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2535}
2536
2537/* Opcode 0xf3 0x0f 0x60 - invalid */
2538
2539
2540/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2541FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2542{
2543 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2544 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2545}
2546
2547/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2548FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2549{
2550 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2551 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2552}
2553
2554/* Opcode 0xf3 0x0f 0x61 - invalid */
2555
2556
2557/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2558FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2559{
2560 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2561 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2562}
2563
2564/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2565FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2566{
2567 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2568 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2569}
2570
2571/* Opcode 0xf3 0x0f 0x62 - invalid */
2572
2573
2574
2575/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2576FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2577/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2578FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2579/* Opcode 0xf3 0x0f 0x63 - invalid */
2580
2581/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2582FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2583/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2584FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2585/* Opcode 0xf3 0x0f 0x64 - invalid */
2586
2587/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2588FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2589/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2590FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2591/* Opcode 0xf3 0x0f 0x65 - invalid */
2592
2593/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2594FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2595/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2596FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2597/* Opcode 0xf3 0x0f 0x66 - invalid */
2598
2599/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2600FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2601/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2602FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2603/* Opcode 0xf3 0x0f 0x67 - invalid */
2604
2605
2606/**
2607 * Common worker for MMX instructions on the form:
2608 * pxxxx mm1, mm2/mem64
2609 *
2610 * The 2nd operand is the second half of a register, which in the memory case
2611 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2612 * where it may read the full 128 bits or only the upper 64 bits.
2613 *
2614 * Exceptions type 4.
2615 */
2616FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2617{
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2620 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2621 {
2622 /*
2623 * Register, register.
2624 */
2625 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2626 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2628 IEM_MC_BEGIN(2, 0);
2629 IEM_MC_ARG(uint64_t *, pDst, 0);
2630 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2631 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2632 IEM_MC_PREPARE_FPU_USAGE();
2633 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2634 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2635 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2636 IEM_MC_ADVANCE_RIP();
2637 IEM_MC_END();
2638 }
2639 else
2640 {
2641 /*
2642 * Register, memory.
2643 */
2644 IEM_MC_BEGIN(2, 2);
2645 IEM_MC_ARG(uint64_t *, pDst, 0);
2646 IEM_MC_LOCAL(uint64_t, uSrc);
2647 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2649
2650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2652 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2653 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2654
2655 IEM_MC_PREPARE_FPU_USAGE();
2656 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2657 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2658
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 return VINF_SUCCESS;
2663}
2664
2665
2666/**
2667 * Common worker for SSE2 instructions on the form:
2668 * pxxxx xmm1, xmm2/mem128
2669 *
2670 * The 2nd operand is the second half of a register, which in the memory case
2671 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2672 * where it may read the full 128 bits or only the upper 64 bits.
2673 *
2674 * Exceptions type 4.
2675 */
2676FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2677{
2678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2680 {
2681 /*
2682 * Register, register.
2683 */
2684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2685 IEM_MC_BEGIN(2, 0);
2686 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2687 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2688 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2689 IEM_MC_PREPARE_SSE_USAGE();
2690 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2691 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2692 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2693 IEM_MC_ADVANCE_RIP();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 /*
2699 * Register, memory.
2700 */
2701 IEM_MC_BEGIN(2, 2);
2702 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2703 IEM_MC_LOCAL(RTUINT128U, uSrc);
2704 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2706
2707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2710 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2711
2712 IEM_MC_PREPARE_SSE_USAGE();
2713 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2714 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2715
2716 IEM_MC_ADVANCE_RIP();
2717 IEM_MC_END();
2718 }
2719 return VINF_SUCCESS;
2720}
2721
2722
2723/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2724FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2725{
2726 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2727 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2728}
2729
2730/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2731FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2732{
2733 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2734 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2735}
2736/* Opcode 0xf3 0x0f 0x68 - invalid */
2737
2738
2739/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2740FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2741{
2742 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2743 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2744}
2745
2746/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2747FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2748{
2749 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2750 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2751}
2752
2753/* Opcode 0xf3 0x0f 0x69 - invalid */
2754
2755
2756/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2757FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2758{
2759 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2760 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2761}
2762
2763/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2764FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2765{
2766 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2767 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2768}
2769/* Opcode 0xf3 0x0f 0x6a - invalid */
2770
2771
2772/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2773FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2774/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2775FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2776/* Opcode 0xf3 0x0f 0x6b - invalid */
2777
2778
2779/* Opcode 0x0f 0x6c - invalid */
2780
2781/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2782FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2783{
2784 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2785 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2786}
2787
2788/* Opcode 0xf3 0x0f 0x6c - invalid */
2789/* Opcode 0xf2 0x0f 0x6c - invalid */
2790
2791
2792/* Opcode 0x0f 0x6d - invalid */
2793
2794/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2795FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2796{
2797 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2798 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2799}
2800
2801/* Opcode 0xf3 0x0f 0x6d - invalid */
2802
2803
2804/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2805FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2806{
2807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2808 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2809 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2810 else
2811 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2812 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2813 {
2814 /* MMX, greg */
2815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2816 IEM_MC_BEGIN(0, 1);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_LOCAL(uint64_t, u64Tmp);
2820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2821 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2822 else
2823 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2824 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2825 IEM_MC_ADVANCE_RIP();
2826 IEM_MC_END();
2827 }
2828 else
2829 {
2830 /* MMX, [mem] */
2831 IEM_MC_BEGIN(0, 2);
2832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2833 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2838 {
2839 IEM_MC_LOCAL(uint64_t, u64Tmp);
2840 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2841 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2842 }
2843 else
2844 {
2845 IEM_MC_LOCAL(uint32_t, u32Tmp);
2846 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2847 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2848 }
2849 IEM_MC_ADVANCE_RIP();
2850 IEM_MC_END();
2851 }
2852 return VINF_SUCCESS;
2853}
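
/**
 * The REX.W distinction above in one line of C: movq copies all 64 bits
 * while movd copies 32 bits and zero-extends, which is what
 * IEM_MC_STORE_MREG_U32_ZX_U64 expresses.  Standalone sketch (helper name
 * made up):
 * @code
 *  #include <stdint.h>
 *  #include <stdbool.h>
 *
 *  static uint64_t SketchMovdMovqToMm(uint64_t uGreg, bool fRexW)
 *  {
 *      return fRexW ? uGreg : (uint64_t)(uint32_t)uGreg;
 *  }
 * @endcode
 */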
2854
2855/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2856FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2857{
2858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2859 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2860 IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
2861 else
2862 IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
2863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2864 {
2865 /* XMM, greg*/
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_BEGIN(0, 1);
2868 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2869 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2870 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2871 {
2872 IEM_MC_LOCAL(uint64_t, u64Tmp);
2873 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2874 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2875 }
2876 else
2877 {
2878 IEM_MC_LOCAL(uint32_t, u32Tmp);
2879 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2880 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2881 }
2882 IEM_MC_ADVANCE_RIP();
2883 IEM_MC_END();
2884 }
2885 else
2886 {
2887 /* XMM, [mem] */
2888 IEM_MC_BEGIN(0, 2);
2889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2893 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2895 {
2896 IEM_MC_LOCAL(uint64_t, u64Tmp);
2897 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2898 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2899 }
2900 else
2901 {
2902 IEM_MC_LOCAL(uint32_t, u32Tmp);
2903 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2904 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2905 }
2906 IEM_MC_ADVANCE_RIP();
2907 IEM_MC_END();
2908 }
2909 return VINF_SUCCESS;
2910}
2911
2912/* Opcode 0xf3 0x0f 0x6e - invalid */
2913
2914
2915/** Opcode 0x0f 0x6f - movq Pq, Qq */
2916FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2917{
2918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2919 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2920 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2921 {
2922 /*
2923 * Register, register.
2924 */
2925 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2926 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_BEGIN(0, 1);
2929 IEM_MC_LOCAL(uint64_t, u64Tmp);
2930 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2931 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2932 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2933 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /*
2940 * Register, memory.
2941 */
2942 IEM_MC_BEGIN(0, 2);
2943 IEM_MC_LOCAL(uint64_t, u64Tmp);
2944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2945
2946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2949 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2950 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2951 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2952
2953 IEM_MC_ADVANCE_RIP();
2954 IEM_MC_END();
2955 }
2956 return VINF_SUCCESS;
2957}
2958
2959/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
2960FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2961{
2962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2963 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2964 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2965 {
2966 /*
2967 * Register, register.
2968 */
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2970 IEM_MC_BEGIN(0, 0);
2971 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2972 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2973 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2974 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2975 IEM_MC_ADVANCE_RIP();
2976 IEM_MC_END();
2977 }
2978 else
2979 {
2980 /*
2981 * Register, memory.
2982 */
2983 IEM_MC_BEGIN(0, 2);
2984 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2986
2987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2989 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2990 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2991 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2992 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2993
2994 IEM_MC_ADVANCE_RIP();
2995 IEM_MC_END();
2996 }
2997 return VINF_SUCCESS;
2998}
2999
3000/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3001FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3006 {
3007 /*
3008 * Register, register.
3009 */
3010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011 IEM_MC_BEGIN(0, 0);
3012 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3014 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3015 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3016 IEM_MC_ADVANCE_RIP();
3017 IEM_MC_END();
3018 }
3019 else
3020 {
3021 /*
3022 * Register, memory.
3023 */
3024 IEM_MC_BEGIN(0, 2);
3025 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3030 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3032 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3034
3035 IEM_MC_ADVANCE_RIP();
3036 IEM_MC_END();
3037 }
3038 return VINF_SUCCESS;
3039}
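
/**
 * The only difference between the movdqa and movdqu loads above is the
 * alignment rule: movdqa raises #GP(0) when the effective address is not
 * 16-byte aligned, movdqu accepts any address.  Standalone sketch of the
 * check (helper name made up):
 * @code
 *  #include <stdint.h>
 *
 *  // Returns 0 when a movdqa access is allowed, -1 where #GP(0) is raised.
 *  static int SketchMovdqaAlignCheck(uint64_t GCPtrEff)
 *  {
 *      return (GCPtrEff & 15) == 0 ? 0 : -1;
 *  }
 * @endcode
 */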
3040
3041
3042/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3043FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3044{
3045 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3048 {
3049 /*
3050 * Register, register.
3051 */
3052 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054
3055 IEM_MC_BEGIN(3, 0);
3056 IEM_MC_ARG(uint64_t *, pDst, 0);
3057 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3058 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3059 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3060 IEM_MC_PREPARE_FPU_USAGE();
3061 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3062 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3063 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3064 IEM_MC_ADVANCE_RIP();
3065 IEM_MC_END();
3066 }
3067 else
3068 {
3069 /*
3070 * Register, memory.
3071 */
3072 IEM_MC_BEGIN(3, 2);
3073 IEM_MC_ARG(uint64_t *, pDst, 0);
3074 IEM_MC_LOCAL(uint64_t, uSrc);
3075 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3077
3078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3079 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3080 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3083
3084 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3085 IEM_MC_PREPARE_FPU_USAGE();
3086 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3087 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3088
3089 IEM_MC_ADVANCE_RIP();
3090 IEM_MC_END();
3091 }
3092 return VINF_SUCCESS;
3093}
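
/**
 * The pshufw immediate holds four 2-bit selectors, one per result word:
 * selector i picks the source word that lands in result word i.  Standalone
 * sketch (helper name made up):
 * @code
 *  #include <stdint.h>
 *
 *  static uint64_t SketchPshufw(uint64_t uSrc, uint8_t bImm)
 *  {
 *      uint64_t uResult = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *      {
 *          unsigned iSel = (bImm >> i * 2) & 3;
 *          uResult |= (uint64_t)(uint16_t)(uSrc >> iSel * 16) << i * 16;
 *      }
 *      return uResult;
 *  }
 * @endcode
 * E.g. bImm=0x1b (selectors 3,2,1,0) reverses the four words; pshufd below
 * applies the same scheme to the four dwords of an XMM register.
 */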
3094
3095/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3096FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3097{
3098 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3101 {
3102 /*
3103 * Register, register.
3104 */
3105 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3107
3108 IEM_MC_BEGIN(3, 0);
3109 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3110 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3111 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3112 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3113 IEM_MC_PREPARE_SSE_USAGE();
3114 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3115 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3116 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3117 IEM_MC_ADVANCE_RIP();
3118 IEM_MC_END();
3119 }
3120 else
3121 {
3122 /*
3123 * Register, memory.
3124 */
3125 IEM_MC_BEGIN(3, 2);
3126 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3127 IEM_MC_LOCAL(RTUINT128U, uSrc);
3128 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3130
3131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3132 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3133 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3136
3137 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3138 IEM_MC_PREPARE_SSE_USAGE();
3139 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3140 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3141
3142 IEM_MC_ADVANCE_RIP();
3143 IEM_MC_END();
3144 }
3145 return VINF_SUCCESS;
3146}
3147
3148/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3149FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3150{
3151 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3154 {
3155 /*
3156 * Register, register.
3157 */
3158 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3160
3161 IEM_MC_BEGIN(3, 0);
3162 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3163 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3164 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3165 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3166 IEM_MC_PREPARE_SSE_USAGE();
3167 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3168 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3169 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3170 IEM_MC_ADVANCE_RIP();
3171 IEM_MC_END();
3172 }
3173 else
3174 {
3175 /*
3176 * Register, memory.
3177 */
3178 IEM_MC_BEGIN(3, 2);
3179 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3180 IEM_MC_LOCAL(RTUINT128U, uSrc);
3181 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3183
3184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3185 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3186 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3188 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3189
3190 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3191 IEM_MC_PREPARE_SSE_USAGE();
3192 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3193 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3194
3195 IEM_MC_ADVANCE_RIP();
3196 IEM_MC_END();
3197 }
3198 return VINF_SUCCESS;
3199}
3200
3201/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3202FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3203{
3204 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3207 {
3208 /*
3209 * Register, register.
3210 */
3211 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3213
3214 IEM_MC_BEGIN(3, 0);
3215 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3216 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3217 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3218 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3219 IEM_MC_PREPARE_SSE_USAGE();
3220 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3221 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3222 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3223 IEM_MC_ADVANCE_RIP();
3224 IEM_MC_END();
3225 }
3226 else
3227 {
3228 /*
3229 * Register, memory.
3230 */
3231 IEM_MC_BEGIN(3, 2);
3232 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3233 IEM_MC_LOCAL(RTUINT128U, uSrc);
3234 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3236
3237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3238 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3239 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3241 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3242
3243 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3244 IEM_MC_PREPARE_SSE_USAGE();
3245 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3246 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3247
3248 IEM_MC_ADVANCE_RIP();
3249 IEM_MC_END();
3250 }
3251 return VINF_SUCCESS;
3252}
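
/**
 * pshufhw and pshuflw shuffle only one half of the XMM register with the
 * same 2-bit selector scheme while the other half is copied through
 * unchanged.  Standalone sketch of the high variant (helper name made up;
 * aDst and aSrc must not overlap):
 * @code
 *  #include <stdint.h>
 *
 *  static void SketchPshufhw(uint16_t aDst[8], uint16_t const aSrc[8], uint8_t bImm)
 *  {
 *      for (unsigned i = 0; i < 4; i++)
 *          aDst[i] = aSrc[i];                              // low qword copied
 *      for (unsigned i = 0; i < 4; i++)
 *          aDst[4 + i] = aSrc[4 + ((bImm >> i * 2) & 3)];  // high words shuffled
 *  }
 * @endcode
 */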
3253
3254
3255/** Opcode 0x0f 0x71 11/2. */
3256FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3257
3258/** Opcode 0x66 0x0f 0x71 11/2. */
3259FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3260
3261/** Opcode 0x0f 0x71 11/4. */
3262FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3263
3264/** Opcode 0x66 0x0f 0x71 11/4. */
3265FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3266
3267/** Opcode 0x0f 0x71 11/6. */
3268FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3269
3270/** Opcode 0x66 0x0f 0x71 11/6. */
3271FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3272
3273
3274/**
3275 * Group 12 jump table for register variant.
3276 */
3277IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3278{
3279 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3280 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3281 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3282 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3283 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3284 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3285 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3286 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3287};
3288AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3289
3290
3291/** Opcode 0x0f 0x71. */
3292FNIEMOP_DEF(iemOp_Grp12)
3293{
3294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3295 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3296 /* register, register */
3297 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3298 + pVCpu->iem.s.idxPrefix], bRm);
3299 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3300}
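
/**
 * The group 12/13/14 register-form jump tables above and below are laid out
 * as eight rows (the /r reg field) of four columns (the mandatory prefix in
 * idxPrefix order: none, 0x66, 0xf3, 0xf2), hence the reg * 4 + prefix
 * indexing.  Standalone sketch (helper name made up):
 * @code
 *  #include <stdint.h>
 *
 *  static unsigned SketchGroupTableIndex(uint8_t bRm, unsigned idxPrefix)
 *  {
 *      return ((bRm >> 3) & 7) * 4 + idxPrefix;    // 0..31 into the 8*4 table
 *  }
 * @endcode
 */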
3301
3302
3303/** Opcode 0x0f 0x72 11/2. */
3304FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3305
3306/** Opcode 0x66 0x0f 0x72 11/2. */
3307FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3308
3309/** Opcode 0x0f 0x72 11/4. */
3310FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3311
3312/** Opcode 0x66 0x0f 0x72 11/4. */
3313FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3314
3315/** Opcode 0x0f 0x72 11/6. */
3316FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3317
3318/** Opcode 0x66 0x0f 0x72 11/6. */
3319FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3320
3321
3322/**
3323 * Group 13 jump table for register variant.
3324 */
3325IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3326{
3327 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3328 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3329 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3330 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3331 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3332 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3333 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3334 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3335};
3336AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3337
3338/** Opcode 0x0f 0x72. */
3339FNIEMOP_DEF(iemOp_Grp13)
3340{
3341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3342 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3343 /* register, register */
3344 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3345 + pVCpu->iem.s.idxPrefix], bRm);
3346 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3347}
3348
3349
3350/** Opcode 0x0f 0x73 11/2. */
3351FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3352
3353/** Opcode 0x66 0x0f 0x73 11/2. */
3354FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3355
3356/** Opcode 0x66 0x0f 0x73 11/3. */
3357FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3358
3359/** Opcode 0x0f 0x73 11/6. */
3360FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3361
3362/** Opcode 0x66 0x0f 0x73 11/6. */
3363FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3364
3365/** Opcode 0x66 0x0f 0x73 11/7. */
3366FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3367
3368/**
3369 * Group 14 jump table for register variant.
3370 */
3371IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3372{
3373 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3374 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3375 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3376 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3377 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3378 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3379 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3380 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3381};
3382AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3383
3384
3385/** Opcode 0x0f 0x73. */
3386FNIEMOP_DEF(iemOp_Grp14)
3387{
3388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3390 /* register, register */
3391 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3392 + pVCpu->iem.s.idxPrefix], bRm);
3393 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3394}
3395
3396
3397/**
3398 * Common worker for MMX instructions on the form:
3399 * pxxx mm1, mm2/mem64
3400 */
3401FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3402{
3403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3404 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3405 {
3406 /*
3407 * Register, register.
3408 */
3409 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3410 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3412 IEM_MC_BEGIN(2, 0);
3413 IEM_MC_ARG(uint64_t *, pDst, 0);
3414 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3415 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3416 IEM_MC_PREPARE_FPU_USAGE();
3417 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3418 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3419 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3420 IEM_MC_ADVANCE_RIP();
3421 IEM_MC_END();
3422 }
3423 else
3424 {
3425 /*
3426 * Register, memory.
3427 */
3428 IEM_MC_BEGIN(2, 2);
3429 IEM_MC_ARG(uint64_t *, pDst, 0);
3430 IEM_MC_LOCAL(uint64_t, uSrc);
3431 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3433
3434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3436 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3437 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3438
3439 IEM_MC_PREPARE_FPU_USAGE();
3440 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3441 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3442
3443 IEM_MC_ADVANCE_RIP();
3444 IEM_MC_END();
3445 }
3446 return VINF_SUCCESS;
3447}
3448
3449
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


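/*
 * The pcmpeqX wrappers below hand the same IEMOPMEDIAF2 table entry to both
 * workers: pImpl->pfnU64 implements the MMX form and pImpl->pfnU128 the
 * SSE2 form.  Each element compare yields all-ones on equality and zero
 * otherwise, e.g. pcmpeqb turns 0x11 vs 0x11 into 0xff and 0x11 vs 0x22
 * into 0x00.
 */
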
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}

/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}

/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}

/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}

/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}

/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}

/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);


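/*
 * For the movd/movq pair below REX.W selects the operand width: without it
 * the low 32 bits of the MMX register are transferred (movd), with it all
 * 64 bits are (movq).  E.g. 0f 7e c0 is movd eax,mm0, while 48 0f 7e c0 is
 * movq rax,mm0.
 */
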
/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
    else
        IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, MMX */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], MMX */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
    else
        IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, XMM */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], XMM */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
{
    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

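/*
 * The movdqu variant below is identical to the movdqa one above apart from
 * the memory store: IEM_MC_STORE_MEM_U128 instead of
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE, i.e. no 16-byte alignment check and no
 * #GP(0) on unaligned operands.
 */
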
/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



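/*
 * Jcc condition summary for the 0x0f 0x80..0x8f block below (Jv is a
 * rel16/rel32 displacement, sign-extended and added to the next RIP):
 *      jo/jno      OF=1 / OF=0
 *      jc/jnc      CF=1 / CF=0
 *      je/jne      ZF=1 / ZF=0
 *      jbe/jnbe    (CF|ZF)=1 / (CF|ZF)=0
 *      js/jns      SF=1 / SF=0
 *      jp/jnp      PF=1 / PF=0
 *      jl/jnl      SF!=OF / SF==OF
 *      jle/jnle    ZF=1||SF!=OF / ZF=0&&SF==OF
 */
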
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


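/*
 * The setcc block below (0x0f 0x90..0x9f) tests the same conditions as the
 * jcc block above, but instead of branching it stores 1 (condition met) or
 * 0 (condition not met) in the byte register or memory byte selected by
 * the mod r/m byte.
 */
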
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
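    /* Only 0f a0 (push fs) and 0f a8 (push gs) get here in 64-bit mode; the
       one byte push cs/ss/ds/es encodings are invalid in long mode, hence
       the assertion above. */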
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
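            /* IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32: on some CPUs
               a 32-bit push of a segment register only writes the low 16
               bits of the stack slot, which is presumably what the dedicated
               micro-op exists to model. */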
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486s have cpuid. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}


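/*
 * bt, btc, btr and bts share the worker below.  For bt the destination is
 * only read (pImpl->pfnLockedU16 is NULL), so the memory path maps it
 * read-only and rejects the lock prefix; the other three map it read-write
 * and have locked variants.
 */
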
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
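        /* The bit offset in Gv is signed, so the effective address must be
           adjusted by (offset >> log2(width)) * sizeof(operand) bytes while
           the in-word bit index becomes offset & (width - 1).  E.g. for
           16-bit operands, offset 35 gives i16AddrAdj = (35 >> 4) << 1 = +4
           bytes and bit 35 & 15 = 3; offset -1 gives (-1 >> 4) << 1 = -2
           bytes and bit 15. */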
5253 switch (pVCpu->iem.s.enmEffOpSize)
5254 {
5255 case IEMMODE_16BIT:
5256 IEM_MC_BEGIN(3, 2);
5257 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5258 IEM_MC_ARG(uint16_t, u16Src, 1);
5259 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5261 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5262
5263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5264 if (pImpl->pfnLockedU16)
5265 IEMOP_HLP_DONE_DECODING();
5266 else
5267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5268 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
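    /* AF is always left undefined by shld/shrd, and OF is only defined for
       1-bit shifts, so the verifier is told to ignore both. */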

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

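                /* The trailing 1 tells the effective-address calculation that
                   one immediate byte follows the ModR/M bytes, so RIP-relative
                   operands are resolved correctly. */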
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

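    /* Use the real lfence instruction when the host has SSE2, otherwise fall
       back to an alternative memory fence (e.g. a locked operation). */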
    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
/** @todo continue here tomorrow! (see bs3-cpu-decoding-1.c32 r113507). */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
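    /* Memory forms encode fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/
       xsaveopt/clflush; register forms are the fences (no prefix) and the
       F3-prefixed rdfsbase/rdgsbase/wrfsbase/wrgsbase group. */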
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
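        /* The helper updates the local AL copy when the compare fails, so it
           is stored back to AL unconditionally below. */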
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
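                /* On 32-bit hosts the 64-bit source operand is passed to the
                   assembly helper by reference rather than by value. */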
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg, /*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
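            /* Far pointer layout in memory: the offset comes first, with the
               selector word at the next higher address (disp 2/4/8 by
               operand size). */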
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg, /*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg, /*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt Ev,Ib");  break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
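    /* Only CF has a defined result after the bit instructions; the other
       arithmetic flags are undefined (or unaffected) and excluded from
       verification. */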

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
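        /* Unlike the Ev,Gv forms, the immediate bit offset is truncated to
           the operand width, so no effective address adjustment is needed. */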
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
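        /* xadd exchanges as it adds: the helper leaves the original memory
           value in the register copy, which is written back to the source
           register after the memory commit. */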
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7045 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7046 IEM_MC_FETCH_EFLAGS(EFlags);
7047 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7049 else
7050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7051
7052 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7053 IEM_MC_COMMIT_EFLAGS(EFlags);
7054 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7055 IEM_MC_ADVANCE_RIP();
7056 IEM_MC_END();
7057 return VINF_SUCCESS;
7058
7059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7060 }
7061 }
7062}
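
/* Note! In 64-bit mode a 32-bit GPR write zero-extends into the full 64-bit
   register, which is why the 32-bit register form above clears the high
   dwords of both operands. Sketch of that rule (hypothetical helper,
   illustration only): */
#if 0
static void sketchStoreGReg32(uint64_t *pu64Greg, uint32_t u32Value)
{
    *pu64Greg = u32Value; /* bits 63:32 are implicitly zeroed */
}
#endif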
7063
7064
7065/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7066FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7067/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7068FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7069/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7070FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7071/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7072FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7073
7074
7075/** Opcode 0x0f 0xc3. */
7076FNIEMOP_DEF(iemOp_movnti_My_Gy)
7077{
7078 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7079
7080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7081
7082 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7083 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7084 {
7085 switch (pVCpu->iem.s.enmEffOpSize)
7086 {
7087 case IEMMODE_32BIT:
7088 IEM_MC_BEGIN(0, 2);
7089 IEM_MC_LOCAL(uint32_t, u32Value);
7090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7091
7092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7094 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7095 return IEMOP_RAISE_INVALID_OPCODE();
7096
7097 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7098 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7099 IEM_MC_ADVANCE_RIP();
7100 IEM_MC_END();
7101 break;
7102
7103 case IEMMODE_64BIT:
7104 IEM_MC_BEGIN(0, 2);
7105 IEM_MC_LOCAL(uint64_t, u64Value);
7106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7107
7108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7110 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7111 return IEMOP_RAISE_INVALID_OPCODE();
7112
7113 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7114 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7115 IEM_MC_ADVANCE_RIP();
7116 IEM_MC_END();
7117 break;
7118
7119 case IEMMODE_16BIT:
7120 /** @todo check this form. */
7121 return IEMOP_RAISE_INVALID_OPCODE();
7122 }
7123 }
7124 else
7125 return IEMOP_RAISE_INVALID_OPCODE();
7126 return VINF_SUCCESS;
7127}
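
/* Note! movnti is a non-temporal store hint; under emulation the cache hint
   has no observable effect, so the operation reduces to a plain store, as
   this 32-bit sketch assumes (hypothetical helper, illustration only). */
#if 0
static void sketchMovnti32(uint32_t *pu32Dst, uint32_t u32Src)
{
    *pu32Dst = u32Src; /* the NT hint only affects cache allocation on real hardware */
}
#endif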
7128/* Opcode 0x66 0x0f 0xc3 - invalid */
7129/* Opcode 0xf3 0x0f 0xc3 - invalid */
7130/* Opcode 0xf2 0x0f 0xc3 - invalid */
7131
7132/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7133FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7134/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7135FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7136/* Opcode 0xf3 0x0f 0xc4 - invalid */
7137/* Opcode 0xf2 0x0f 0xc4 - invalid */
7138
7139/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7140FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7141/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7142FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7143/* Opcode 0xf3 0x0f 0xc5 - invalid */
7144/* Opcode 0xf2 0x0f 0xc5 - invalid */
7145
7146/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7147FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7148/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7149FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7150/* Opcode 0xf3 0x0f 0xc6 - invalid */
7151/* Opcode 0xf2 0x0f 0xc6 - invalid */
7152
7153
7154/** Opcode 0x0f 0xc7 !11/1. */
7155FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7156{
7157 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7158
7159 IEM_MC_BEGIN(4, 3);
7160 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7161 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7162 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7163 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7164 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7165 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7167
7168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7169 IEMOP_HLP_DONE_DECODING();
7170 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7171
7172 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7173 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7174 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7175
7176 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7177 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7178 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7179
7180 IEM_MC_FETCH_EFLAGS(EFlags);
7181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7182 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7183 else
7184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7185
7186 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7187 IEM_MC_COMMIT_EFLAGS(EFlags);
7188 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7189 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7190 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7191 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7192 IEM_MC_ENDIF();
7193 IEM_MC_ADVANCE_RIP();
7194
7195 IEM_MC_END();
7196 return VINF_SUCCESS;
7197}
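
/* Note! Sketch of the comparison logic the cmpxchg8b workers implement,
   assuming the architected semantics (hypothetical helper, not the real
   A-impl): compare EDX:EAX with m64; on match store ECX:EBX and set ZF,
   otherwise load m64 into EDX:EAX and clear ZF. */
#if 0
static void sketchCmpXchg8b(uint64_t *pu64Mem, RTUINT64U *pu64EaxEdx,
                            RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == pu64EaxEdx->u)
    {
        *pu64Mem   = pu64EbxEcx->u;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Mem;
        *pfEFlags    &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif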
7198
7199
7200/** Opcode REX.W 0x0f 0xc7 !11/1. */
7201FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7202{
7203 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7204 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7205 {
7206#if 0
7207 RT_NOREF(bRm);
7208 IEMOP_BITCH_ABOUT_STUB();
7209 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7210#else
7211 IEM_MC_BEGIN(4, 3);
7212 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7213 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7214 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7215 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7216 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7217 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7219
7220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7221 IEMOP_HLP_DONE_DECODING();
7222 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7223 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7224
7225 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7226 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7227 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7228
7229 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7230 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7231 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7232
7233 IEM_MC_FETCH_EFLAGS(EFlags);
7234# ifdef RT_ARCH_AMD64
7235 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7236 {
7237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7238 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7239 else
7240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7241 }
7242 else
7243# endif
7244 {
7245 /* Note! The fallback for 32-bit systems and systems without CX16 consists
7246 of multiple accesses and is not at all atomic, which works fine in a
7247 UNI CPU guest configuration (ignoring DMA). If guest SMP is active we
7248 have no choice but to use a rendezvous callback here. Sigh. */
7249 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7250 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7251 else
7252 {
7253 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7254 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7255 }
7256 }
7257
7258 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7259 IEM_MC_COMMIT_EFLAGS(EFlags);
7260 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7261 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7262 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7263 IEM_MC_ENDIF();
7264 IEM_MC_ADVANCE_RIP();
7265
7266 IEM_MC_END();
7267 return VINF_SUCCESS;
7268#endif
7269 }
7270 Log(("cmpxchg16b -> #UD\n"));
7271 return IEMOP_RAISE_INVALID_OPCODE();
7272}
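
/* Note! Sketch of the SMP decision taken above: a non-atomic fallback is
   tolerable only while no other vCPU can observe a torn 16-byte update
   (hypothetical helper, illustration only). */
#if 0
static bool sketchNeedsRendezvous(PVMCPU pVCpu)
{
    return pVCpu->CTX_SUFF(pVM)->cCpus > 1; /* torn writes would be visible to peer vCPUs */
}
#endif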
7273
7274
7275/** Opcode 0x0f 0xc7 11/6. */
7276FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7277
7278/** Opcode 0x0f 0xc7 !11/6. */
7279FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7280
7281/** Opcode 0x66 0x0f 0xc7 !11/6. */
7282FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7283
7284/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7285FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7286
7287/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7288FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7289
7290
7291/** Opcode 0x0f 0xc7. */
7292FNIEMOP_DEF(iemOp_Grp9)
7293{
7294 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7296 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7297 {
7298 case 0: case 2: case 3: case 4: case 5:
7299 return IEMOP_RAISE_INVALID_OPCODE();
7300 case 1:
7301 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7302 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7303 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7304 return IEMOP_RAISE_INVALID_OPCODE();
7305 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7306 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7307 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7308 case 6:
7309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7310 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7311 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7312 {
7313 case 0:
7314 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7315 case IEM_OP_PRF_SIZE_OP:
7316 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7317 case IEM_OP_PRF_REPZ:
7318 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7319 default:
7320 return IEMOP_RAISE_INVALID_OPCODE();
7321 }
7322 case 7:
7323 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7324 {
7325 case 0:
7326 case IEM_OP_PRF_REPZ:
7327 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7328 default:
7329 return IEMOP_RAISE_INVALID_OPCODE();
7330 }
7331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7332 }
7333}
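
/* Note! Group opcodes like 0x0f 0xc7 dispatch on the reg field (bits 5:3) of
   the ModR/M byte, as the switch above does. In sketch form (hypothetical
   helper, illustration only): */
#if 0
static unsigned sketchModRmRegField(uint8_t bRm)
{
    return (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* bits 5:3 */
}
#endif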
7334
7335
7336/**
7337 * Common 'bswap register' helper.
7338 */
7339FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7340{
7341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7342 switch (pVCpu->iem.s.enmEffOpSize)
7343 {
7344 case IEMMODE_16BIT:
7345 IEM_MC_BEGIN(1, 0);
7346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7347 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7348 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7349 IEM_MC_ADVANCE_RIP();
7350 IEM_MC_END();
7351 return VINF_SUCCESS;
7352
7353 case IEMMODE_32BIT:
7354 IEM_MC_BEGIN(1, 0);
7355 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7356 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7357 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7358 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7359 IEM_MC_ADVANCE_RIP();
7360 IEM_MC_END();
7361 return VINF_SUCCESS;
7362
7363 case IEMMODE_64BIT:
7364 IEM_MC_BEGIN(1, 0);
7365 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7366 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7367 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7368 IEM_MC_ADVANCE_RIP();
7369 IEM_MC_END();
7370 return VINF_SUCCESS;
7371
7372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7373 }
7374}
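
/* Note! Sketch of the byte reversal the 32-bit bswap A-impl performs,
   assuming the architected semantics (hypothetical helper, illustration
   only). The 16-bit form is architecturally undefined, which is why the
   helper above routes it through a 32-bit reference without clearing the
   high dword. */
#if 0
static void sketchBSwapU32(uint32_t *pu32Dst)
{
    uint32_t const u = *pu32Dst;
    *pu32Dst = (u << 24)
             | ((u & UINT32_C(0x0000ff00)) << 8)
             | ((u & UINT32_C(0x00ff0000)) >> 8)
             | (u >> 24);
}
#endif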
7375
7376
7377/** Opcode 0x0f 0xc8. */
7378FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7379{
7380 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7381 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
7382 REX.X prefix, but it appears REX.B is the correct one. For a parallel
7383 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7384 IEMOP_HLP_MIN_486();
7385 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7386}
7387
7388
7389/** Opcode 0x0f 0xc9. */
7390FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7391{
7392 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7393 IEMOP_HLP_MIN_486();
7394 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7395}
7396
7397
7398/** Opcode 0x0f 0xca. */
7399FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7400{
7401 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7402 IEMOP_HLP_MIN_486();
7403 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7404}
7405
7406
7407/** Opcode 0x0f 0xcb. */
7408FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7409{
7410 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7411 IEMOP_HLP_MIN_486();
7412 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7413}
7414
7415
7416/** Opcode 0x0f 0xcc. */
7417FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7418{
7419 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7420 IEMOP_HLP_MIN_486();
7421 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7422}
7423
7424
7425/** Opcode 0x0f 0xcd. */
7426FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7427{
7428 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7429 IEMOP_HLP_MIN_486();
7430 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7431}
7432
7433
7434/** Opcode 0x0f 0xce. */
7435FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7436{
7437 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7438 IEMOP_HLP_MIN_486();
7439 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7440}
7441
7442
7443/** Opcode 0x0f 0xcf. */
7444FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7445{
7446 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7447 IEMOP_HLP_MIN_486();
7448 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7449}
7450
7451
7452/* Opcode 0x0f 0xd0 - invalid */
7453/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7454FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7455/* Opcode 0xf3 0x0f 0xd0 - invalid */
7456/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7457FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7458
7459/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7460FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7461/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7462FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7463/* Opcode 0xf3 0x0f 0xd1 - invalid */
7464/* Opcode 0xf2 0x0f 0xd1 - invalid */
7465
7466/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7467FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7468/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7469FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7470/* Opcode 0xf3 0x0f 0xd2 - invalid */
7471/* Opcode 0xf2 0x0f 0xd2 - invalid */
7472
7473/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7474FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7475/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7476FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7477/* Opcode 0xf3 0x0f 0xd3 - invalid */
7478/* Opcode 0xf2 0x0f 0xd3 - invalid */
7479
7480/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7481FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7482/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7483FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7484/* Opcode 0xf3 0x0f 0xd4 - invalid */
7485/* Opcode 0xf2 0x0f 0xd4 - invalid */
7486
7487/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7488FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7489/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7490FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7491/* Opcode 0xf3 0x0f 0xd5 - invalid */
7492/* Opcode 0xf2 0x0f 0xd5 - invalid */
7493
7494/* Opcode 0x0f 0xd6 - invalid */
7495/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7496FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7497/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7498FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7499/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7500FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7501#if 0
7502FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7503{
7504 /* Docs say register only. */
7505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7506
7507 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7508 {
7509 case IEM_OP_PRF_SIZE_OP: /* SSE */
7510 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7511 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7512 IEM_MC_BEGIN(2, 0);
7513 IEM_MC_ARG(uint64_t *, pDst, 0);
7514 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7516 IEM_MC_PREPARE_SSE_USAGE();
7517 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7518 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7519 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7520 IEM_MC_ADVANCE_RIP();
7521 IEM_MC_END();
7522 return VINF_SUCCESS;
7523
7524 case 0: /* MMX */
7525 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7526 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7527 IEM_MC_BEGIN(2, 0);
7528 IEM_MC_ARG(uint64_t *, pDst, 0);
7529 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7530 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7531 IEM_MC_PREPARE_FPU_USAGE();
7532 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7533 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7534 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7535 IEM_MC_ADVANCE_RIP();
7536 IEM_MC_END();
7537 return VINF_SUCCESS;
7538
7539 default:
7540 return IEMOP_RAISE_INVALID_OPCODE();
7541 }
7542}
7543#endif
7544
7545
7546/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7547FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7548{
7549 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7550 /** @todo testcase: Check that the instruction implicitly clears the high
7551 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7552 * and opcode modifications are made to work with the whole width (not
7553 * just 128). */
7554 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7555 /* Docs say register only. */
7556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7558 {
7559 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7560 IEM_MC_BEGIN(2, 0);
7561 IEM_MC_ARG(uint64_t *, pDst, 0);
7562 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7563 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7564 IEM_MC_PREPARE_FPU_USAGE();
7565 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7566 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7567 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7568 IEM_MC_ADVANCE_RIP();
7569 IEM_MC_END();
7570 return VINF_SUCCESS;
7571 }
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573}
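
/* Note! Sketch of the bit gathering pmovmskb performs - one mask bit per
   source byte, taken from that byte's most significant bit; the 64-bit (MMX)
   case is shown (hypothetical helper, not the real A-impl). */
#if 0
static void sketchPMovMskBU64(uint64_t *pu64Dst, uint64_t const *pu64Src)
{
    uint64_t const uSrc  = *pu64Src;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *pu64Dst = fMask; /* the remaining GREG bits end up zero */
}
#endif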
7574
7575/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7576FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7577{
7578 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7579 /** @todo testcase: Check that the instruction implicitly clears the high
7580 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7581 * and opcode modifications are made to work with the whole width (not
7582 * just 128). */
7583 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux");
7584 /* Docs say register only. */
7585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7587 {
7588 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7589 IEM_MC_BEGIN(2, 0);
7590 IEM_MC_ARG(uint64_t *, pDst, 0);
7591 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7592 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7593 IEM_MC_PREPARE_SSE_USAGE();
7594 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7595 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7596 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7597 IEM_MC_ADVANCE_RIP();
7598 IEM_MC_END();
7599 return VINF_SUCCESS;
7600 }
7601 return IEMOP_RAISE_INVALID_OPCODE();
7602}
7603
7604/* Opcode 0xf3 0x0f 0xd7 - invalid */
7605/* Opcode 0xf2 0x0f 0xd7 - invalid */
7606
7607
7608/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7609FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7610/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7611FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7612/* Opcode 0xf3 0x0f 0xd8 - invalid */
7613/* Opcode 0xf2 0x0f 0xd8 - invalid */
7614
7615/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7616FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7617/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7618FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7619/* Opcode 0xf3 0x0f 0xd9 - invalid */
7620/* Opcode 0xf2 0x0f 0xd9 - invalid */
7621
7622/** Opcode 0x0f 0xda - pminub Pq, Qq */
7623FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7624/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7625FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7626/* Opcode 0xf3 0x0f 0xda - invalid */
7627/* Opcode 0xf2 0x0f 0xda - invalid */
7628
7629/** Opcode 0x0f 0xdb - pand Pq, Qq */
7630FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7631/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7632FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7633/* Opcode 0xf3 0x0f 0xdb - invalid */
7634/* Opcode 0xf2 0x0f 0xdb - invalid */
7635
7636/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7637FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7638/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7639FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7640/* Opcode 0xf3 0x0f 0xdc - invalid */
7641/* Opcode 0xf2 0x0f 0xdc - invalid */
7642
7643/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7644FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7645/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7646FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7647/* Opcode 0xf3 0x0f 0xdd - invalid */
7648/* Opcode 0xf2 0x0f 0xdd - invalid */
7649
7650/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7651FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7652/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7653FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7654/* Opcode 0xf3 0x0f 0xde - invalid */
7655/* Opcode 0xf2 0x0f 0xde - invalid */
7656
7657/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7658FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7659/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7660FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7661/* Opcode 0xf3 0x0f 0xdf - invalid */
7662/* Opcode 0xf2 0x0f 0xdf - invalid */
7663
7664/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7665FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7666/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7667FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7668/* Opcode 0xf3 0x0f 0xe0 - invalid */
7669/* Opcode 0xf2 0x0f 0xe0 - invalid */
7670
7671/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7672FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7673/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7674FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7675/* Opcode 0xf3 0x0f 0xe1 - invalid */
7676/* Opcode 0xf2 0x0f 0xe1 - invalid */
7677
7678/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7679FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7680/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7681FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7682/* Opcode 0xf3 0x0f 0xe2 - invalid */
7683/* Opcode 0xf2 0x0f 0xe2 - invalid */
7684
7685/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7686FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7687/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7688FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7689/* Opcode 0xf3 0x0f 0xe3 - invalid */
7690/* Opcode 0xf2 0x0f 0xe3 - invalid */
7691
7692/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7693FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7694/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7695FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7696/* Opcode 0xf3 0x0f 0xe4 - invalid */
7697/* Opcode 0xf2 0x0f 0xe4 - invalid */
7698
7699/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7700FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7701/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7702FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7703/* Opcode 0xf3 0x0f 0xe5 - invalid */
7704/* Opcode 0xf2 0x0f 0xe5 - invalid */
7705
7706/* Opcode 0x0f 0xe6 - invalid */
7707/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7708FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7709/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7710FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7711/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7712FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7713
7714
7715/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7716FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7717{
7718 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7720 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7721 {
7722 /* Register, memory. */
7723 IEM_MC_BEGIN(0, 2);
7724 IEM_MC_LOCAL(uint64_t, uSrc);
7725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7726
7727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7730 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7731
7732 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7733 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7734
7735 IEM_MC_ADVANCE_RIP();
7736 IEM_MC_END();
7737 return VINF_SUCCESS;
7738 }
7739 /* The register, register encoding is invalid. */
7740 return IEMOP_RAISE_INVALID_OPCODE();
7741}
7742
7743/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7744FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7745{
7746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7747 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7748 {
7749 /* Register, memory. */
7750 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7751 IEM_MC_BEGIN(0, 2);
7752 IEM_MC_LOCAL(RTUINT128U, uSrc);
7753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7754
7755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7759
7760 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7761 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7762
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766 }
7767
7768 /* The register, register encoding is invalid. */
7769 return IEMOP_RAISE_INVALID_OPCODE();
7770}
7771
7772/* Opcode 0xf3 0x0f 0xe7 - invalid */
7773/* Opcode 0xf2 0x0f 0xe7 - invalid */
7774
7775
7776/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7777FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7778/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7779FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7780/* Opcode 0xf3 0x0f 0xe8 - invalid */
7781/* Opcode 0xf2 0x0f 0xe8 - invalid */
7782
7783/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7784FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7785/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7786FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7787/* Opcode 0xf3 0x0f 0xe9 - invalid */
7788/* Opcode 0xf2 0x0f 0xe9 - invalid */
7789
7790/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7791FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7792/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7793FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7794/* Opcode 0xf3 0x0f 0xea - invalid */
7795/* Opcode 0xf2 0x0f 0xea - invalid */
7796
7797/** Opcode 0x0f 0xeb - por Pq, Qq */
7798FNIEMOP_STUB(iemOp_por_Pq_Qq);
7799/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7800FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7801/* Opcode 0xf3 0x0f 0xeb - invalid */
7802/* Opcode 0xf2 0x0f 0xeb - invalid */
7803
7804/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7805FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7806/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7807FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7808/* Opcode 0xf3 0x0f 0xec - invalid */
7809/* Opcode 0xf2 0x0f 0xec - invalid */
7810
7811/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7812FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7813/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7814FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7815/* Opcode 0xf3 0x0f 0xed - invalid */
7816/* Opcode 0xf2 0x0f 0xed - invalid */
7817
7818/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7819FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7820/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7821FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7822/* Opcode 0xf3 0x0f 0xee - invalid */
7823/* Opcode 0xf2 0x0f 0xee - invalid */
7824
7825
7826/** Opcode 0x0f 0xef - pxor Pq, Qq */
7827FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
7828{
7829 IEMOP_MNEMONIC(pxor, "pxor");
7830 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
7831}
7832
7833/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
7834FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7835{
7836 IEMOP_MNEMONIC(vpxor, "vpxor");
7837 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7838}
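
/* Note! pxor and vpxor share one 'full, full -> full' decoder worker and
   differ only in the media A-impl table passed in; the operation itself is a
   plain bitwise XOR. Sketch for the 128-bit case (hypothetical helper,
   illustration only): */
#if 0
static void sketchPXorU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->s.Lo ^= puSrc->s.Lo;
    puDst->s.Hi ^= puSrc->s.Hi;
}
#endif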
7839
7840/* Opcode 0xf3 0x0f 0xef - invalid */
7841/* Opcode 0xf2 0x0f 0xef - invalid */
7842
7843/* Opcode 0x0f 0xf0 - invalid */
7844/* Opcode 0x66 0x0f 0xf0 - invalid */
7845/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7846FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7847
7848/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7849FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7850/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7851FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7852/* Opcode 0xf2 0x0f 0xf1 - invalid */
7853
7854/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7855FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7856/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7857FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7858/* Opcode 0xf2 0x0f 0xf2 - invalid */
7859
7860/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7861FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7862/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7863FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7864/* Opcode 0xf2 0x0f 0xf3 - invalid */
7865
7866/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7867FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7868/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7869FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7870/* Opcode 0xf2 0x0f 0xf4 - invalid */
7871
7872/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7873FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7874/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7875FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7876/* Opcode 0xf2 0x0f 0xf5 - invalid */
7877
7878/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7879FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7880/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7881FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7882/* Opcode 0xf2 0x0f 0xf6 - invalid */
7883
7884/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7885FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7886/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7887FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7888/* Opcode 0xf2 0x0f 0xf7 - invalid */
7889
7890/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7891FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7892/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7893FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7894/* Opcode 0xf2 0x0f 0xf8 - invalid */
7895
7896/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7897FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7898/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7899FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7900/* Opcode 0xf2 0x0f 0xf9 - invalid */
7901
7902/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7903FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7904/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7905FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7906/* Opcode 0xf2 0x0f 0xfa - invalid */
7907
7908/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7909FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7910/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7911FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7912/* Opcode 0xf2 0x0f 0xfb - invalid */
7913
7914/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7915FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7916/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7917FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7918/* Opcode 0xf2 0x0f 0xfc - invalid */
7919
7920/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7921FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7922/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7923FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7924/* Opcode 0xf2 0x0f 0xfd - invalid */
7925
7926/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7927FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7928/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7929FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7930/* Opcode 0xf2 0x0f 0xfe - invalid */
7931
7932
7933/** Opcode **** 0x0f 0xff - UD0 */
7934FNIEMOP_DEF(iemOp_ud0)
7935{
7936 IEMOP_MNEMONIC(ud0, "ud0");
7937 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7938 {
7939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7940#ifndef TST_IEM_CHECK_MC
7941 RTGCPTR GCPtrEff;
7942 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7943 if (rcStrict != VINF_SUCCESS)
7944 return rcStrict;
7945#endif
7946 IEMOP_HLP_DONE_DECODING();
7947 }
7948 return IEMOP_RAISE_INVALID_OPCODE();
7949}
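
/* Note! Intel's ud0 consumes a ModR/M byte (including any SIB byte and
   displacement) before raising #UD, while AMD raises #UD without fetching
   ModR/M; the vendor check above models this decode-length difference
   (hypothetical helper, illustration only). */
#if 0
static bool sketchUd0ConsumesModRm(CPUMCPUVENDOR enmVendor)
{
    return enmVendor == CPUMCPUVENDOR_INTEL;
}
#endif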
7950
7951
7952
7953/**
7954 * Two byte opcode map, first byte 0x0f.
7955 *
7956 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
7957 * check if it needs updating as well when making changes.
7958 */
7959IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7960{
7961 /* no prefix, 066h prefix f3h prefix, f2h prefix */
7962 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7963 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7964 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7965 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7966 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7967 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7968 /* 0x06 */ IEMOP_X4(iemOp_clts),
7969 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7970 /* 0x08 */ IEMOP_X4(iemOp_invd),
7971 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7972 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7973 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7974 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7975 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7976 /* 0x0e */ IEMOP_X4(iemOp_femms),
7977 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7978
7979 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7980 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7981 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7982 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7983 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7984 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7985 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7986 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7987 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7988 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7989 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7990 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7991 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7992 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7993 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7994 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7995
7996 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7997 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7998 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7999 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8000 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8001 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8002 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8003 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8004 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8005 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8006 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8007 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8008 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8009 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8010 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8011 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8012
8013 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8014 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8015 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8016 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8017 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8018 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8019 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8020 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8021 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8022 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8023 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8024 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8025 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8026 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8027 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8028 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8029
8030 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8031 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8032 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8033 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8034 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8035 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8036 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8037 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8038 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8039 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8040 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8041 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8042 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8043 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8044 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8045 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8046
8047 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8048 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8049 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8050 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8051 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8052 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8053 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8054 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8055 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8056 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8057 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8058 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8059 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8060 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8061 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8062 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8063
8064 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8065 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8066 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8067 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8068 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8069 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8070 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8071 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8072 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8073 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8074 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8075 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8076 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8077 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8078 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8079 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8080
8081 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8082 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8083 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8084 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8085 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8086 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8087 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8088 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8089
8090 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8091 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8092 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8093 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8094 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8095 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8096 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8097 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8098
8099 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8100 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8101 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8102 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8103 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8104 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8105 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8106 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8107 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8108 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8109 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8110 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8111 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8112 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8113 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8114 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8115
8116 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8117 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8118 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8119 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8120 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8121 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8122 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8123 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8124 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8125 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8126 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8127 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8128 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8129 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8130 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8131 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8132
8133 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8134 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8135 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8136 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8137 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8138 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8139 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8140 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8141 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8142 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8143 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8144 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8145 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8146 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8147 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8148 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8149
8150 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8151 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8152 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8153 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8154 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8155 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8156 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8157 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8158 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8159 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8160 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8161 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8162 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8163 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8164 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8165 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8166
8167 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8168 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8169 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8170 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8171 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8172 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8173 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8174 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8175 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8176 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8177 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8178 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8179 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8180 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8181 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8182 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8183
8184 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8185 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8186 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8187 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8188 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8189 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8190 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8191 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8192 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8193 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8194 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8195 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8196 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8197 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8198 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8199 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8200
8201 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8202 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8203 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8204 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8205 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8206 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8208 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8212 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8216 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8217
8218 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8219 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8220 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8221 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8222 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8223 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8229 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8230 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0xff */ IEMOP_X4(iemOp_ud0),
8234};
8235AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
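
/* Note! The map holds four entries per opcode byte (no prefix, 0x66, 0xf3,
   0xf2), hence the 256 * 4 = 1024 element compile-time check above. A decoder
   would index it along these lines (sketch only; the exact lookup helper and
   the source of the prefix index are assumptions): */
#if 0
static PFNIEMOP sketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0..3 */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif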
8236
8237
8238/**
8239 * VEX opcode map \#1.
8240 *
8241 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
8242 * it needs updating too when making changes.
8243 */
8244IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
8245{
8246 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8247 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
8248 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
8249 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
8250 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
8251 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
8252 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
8253 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
8254 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
8255 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
8256 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
8257 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
8258 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
8259 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
8260 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
8261 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
8262 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
8263
8264 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8265 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8266 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8267 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8268 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8269 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8270 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8271 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8272 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
8273 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
8274 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
8275 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
8276 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
8277 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
8278 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
8279 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
8280
8281 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
8282 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
8283 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
8284 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
8285 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
8286 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
8287 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
8288 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
8289 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8290 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8291 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8292 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8293 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8294 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8295 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8296 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8297
8298 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
8299 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
8300 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
8301 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
8302 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
8303 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
8304 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
8305 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
8306 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8307 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8308 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8309 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8310 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8311 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8312 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8313 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
8314
8315 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
8316 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
8317 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
8318 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
8319 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
8320 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
8321 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
8322 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
8323 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
8324 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
8325 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
8326 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
8327 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
8328 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
8329 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
8330 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
8331
8332 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8333 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8334 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8335 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8336 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8337 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8338 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8339 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8340 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8341 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8342 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8343 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8344 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8345 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8346 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8347 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8348
8349 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8350 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8351 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8352 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8353 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8354 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8355 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8356 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8357 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8358 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8359 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8360 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8361 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8362 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8363 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8364 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8365
8366 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8367 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8368 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8369 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8370 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8371 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8372 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8373 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8374 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
8375 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
8376 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
8377 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
8378 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8379 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8380 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8381 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8382
8383 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
8384 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
8385 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
8386 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
8387 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
8388 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
8389 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
8390 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
8391 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
8392 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
8393 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
8394 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
8395 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
8396 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
8397 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
8398 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
8400 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
8401 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
8402 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
8403 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
8404 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
8405 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
8406 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
8407 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
8408 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
8409 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
8410 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
8411 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
8412 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
8413 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
8414 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
8415 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
8416
8417 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8418 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8419 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8420 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8421 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8422 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8423 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8424 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8425 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8426 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8427 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
8428 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
8429 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
8430 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
8431 /* 0xae */ IEMOP_X4(iemOp_Grp15), /** @todo groups and vex */
8432 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
8433
8434 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8435 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8436 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
8437 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8438 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
8439 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
8440 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8441 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8442 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8443 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8444 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
8445 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
8446 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
8447 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
8448 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
8449 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
8450
8451 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
8452 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
8453 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8454 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
8455 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8456 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8457 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8458 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8459 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
8460 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
8461 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
8462 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
8463 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
8464 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
8465 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
8466 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
8467
8468 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8469 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8470 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8471 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8472 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8473 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8474 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8475 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8476 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8477 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8478 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8479 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8480 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8481 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8482 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8483 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8484
8485 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8486 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8487 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8488 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8489 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8490 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8491 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8492 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8493 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8494 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8495 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8496 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8497 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8498 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8499 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8500 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8501
8502 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8503 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8504 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8505 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8506 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8507 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8508 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8509 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8510 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8511 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8512 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8513 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8514 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8515 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8516 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8517 /* 0xff */ IEMOP_X4(iemOp_ud0),
8518};
8519 AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
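/*
 * Editor's note -- an illustrative, compiled-out sketch of reaching the VEX
 * map above: the VEX.pp field (the two low bits of the last VEX prefix byte)
 * selects the column, mirroring the legacy mandatory-prefix columns of the
 * two-byte map (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2).  The function name
 * and the bVexPp parameter are hypothetical.
 */
#if 0
FNIEMOP_DEF_1(iemOp_VexMap1DispatchSketch, uint8_t, bVexPp)
{
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode); /* opcode byte after the VEX prefix */
    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + (bVexPp & 0x3)]);
}
#endif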
8520/** @} */
8521
8522