VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66464

Last change on this file since 66464 was 66464, checked in by vboxsync, 8 years ago:

IEM: Implemented movss Vss,Wss (f3 0f 10).

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66464 2017-04-06 19:22:01Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for the Group 6 verr (/4) and verw (/5) instructions. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
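

/*
 * Illustrative sketch of the ModR/M decoding the group dispatcher above
 * relies on: mod selects register vs. memory operands, and reg selects the
 * /0../7 group member used to index g_apfnGroup6.  The inline masks stand
 * in for the X86_MODRM_* constants; the #if 0 guard keeps the sketch out
 * of the build.
 */
#if 0
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint8_t const  bRm  = 0xd0;            /* e.g. the ModR/M byte of xgetbv (0f 01 d0) */
    unsigned const iMod = (bRm >> 6) & 3;  /* 3 = register operand, 0..2 = memory forms */
    unsigned const iReg = (bRm >> 3) & 7;  /* the /digit used as jump table index */
    unsigned const iRm  =  bRm       & 7;  /* register number or addressing mode */
    printf("mod=%u /%u rm=%u\n", iMod, iReg, iRm); /* prints: mod=3 /2 rm=0 */
    return 0;
}
#endif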


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
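

/*
 * Illustrative model of the smsw target-CPU behaviour above, assuming the
 * convention encoded there: a 286 reads the undefined top MSW bits as set
 * (OR 0xfff0), a 386 reads everything above ET as set (OR 0xffe0), and
 * 486+ returns the low word of CR0 unchanged.  iemSketchSmsw is a made-up
 * name; the #if 0 guard keeps the sketch out of the build.
 */
#if 0
#include <stdint.h>

static uint16_t iemSketchSmsw(uint32_t uCr0, unsigned uCpuLevel /* 2=286, 3=386, 4=486+ */)
{
    uint16_t uMsw = (uint16_t)uCr0;
    if (uCpuLevel == 2)
        uMsw |= 0xfff0;     /* 286: only PE, MP, EM and TS (bits 0-3) exist */
    else if (uCpuLevel == 3)
        uMsw |= 0xffe0;     /* 386: ET (bit 4) is readable too */
    return uMsw;            /* 486+: plain low 16 bits of CR0 */
}
#endif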


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
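

/*
 * Illustrative check of the mod=3 encodings dispatched above: the third
 * opcode byte is just 0xc0 | (reg << 3) | rm, which is how 0f 01 c8 ends
 * up as monitor and 0f 01 f8 as swapgs.  iemSketchModRm3 is a made-up
 * name; the #if 0 guard keeps the sketch out of the build.
 */
#if 0
#include <assert.h>
#include <stdint.h>

static uint8_t iemSketchModRm3(unsigned iReg, unsigned iRm)
{
    return (uint8_t)(0xc0 | (iReg << 3) | iRm);
}

int main(void)
{
    assert(iemSketchModRm3(1, 0) == 0xc8); /* 0f 01 c8 = monitor */
    assert(iemSketchModRm3(2, 0) == 0xd0); /* 0f 01 d0 = xgetbv  */
    assert(iemSketchModRm3(3, 0) == 0xd8); /* 0f 01 d8 = vmrun   */
    assert(iemSketchModRm3(7, 0) == 0xf8); /* 0f 01 f8 = swapgs  */
    return 0;
}
#endif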

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 * @oponly
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
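

/*
 * Illustrative model of the two movss load forms above: the register form
 * replaces only the low dword of the destination XMM register, while the
 * memory form zero-extends the loaded dword through bit 127 (which is what
 * the VssZxReg operand and IEM_MC_STORE_XREG_U32_ZX_U128 express).  The
 * IEMSKETCH and iemSketch names are made up; #if 0 keeps this out of the
 * build.
 */
#if 0
#include <stdint.h>
#include <string.h>

typedef struct { uint32_t au32[4]; } IEMSKETCHXREG;

static void iemSketchMovssRegReg(IEMSKETCHXREG *pDst, IEMSKETCHXREG const *pSrc)
{
    pDst->au32[0] = pSrc->au32[0];  /* dwords 1..3 of the destination survive */
}

static void iemSketchMovssLoad(IEMSKETCHXREG *pDst, void const *pvSrc)
{
    memcpy(&pDst->au32[0], pvSrc, sizeof(uint32_t));
    pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0; /* zero extended */
}
#endif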


/** Opcode VEX 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);

/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
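

/*
 * Illustrative model of the two 0x0f 0x12 forms above: with a register
 * operand (movhlps) the high qword of the source lands in the low qword of
 * the destination, with a memory operand (movlps) the low qword is loaded
 * from memory; the destination's high qword is untouched either way.  The
 * IEMSKETCH and iemSketch names are made up; #if 0 keeps this out of the
 * build.
 */
#if 0
#include <stdint.h>

typedef struct { uint64_t au64[2]; } IEMSKETCHXREG;

static void iemSketchMovhlps(IEMSKETCHXREG *pDst, IEMSKETCHXREG const *pSrc)
{
    pDst->au64[0] = pSrc->au64[1];  /* high qword of source -> low qword of dest */
}

static void iemSketchMovlpsLoad(IEMSKETCHXREG *pDst, uint64_t const *puSrc)
{
    pDst->au64[0] = *puSrc;         /* low qword from memory, high qword kept */
}
#endif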


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
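

/*
 * Illustrative model of the two SSE3 duplicating moves above, matching
 * their @optest vectors: movsldup duplicates source dwords 0 and 2 into
 * both halves of each result qword, movddup broadcasts the low qword.
 * The IEMSKETCH and iemSketch names are made up; #if 0 keeps this out of
 * the build.
 */
#if 0
#include <stdint.h>

typedef union { uint32_t au32[4]; uint64_t au64[2]; } IEMSKETCHXREG;

static void iemSketchMovsldup(IEMSKETCHXREG *pDst, IEMSKETCHXREG const *pSrc)
{
    uint32_t const uLo = pSrc->au32[0];
    uint32_t const uHi = pSrc->au32[2];
    pDst->au32[0] = uLo; pDst->au32[1] = uLo;
    pDst->au32[2] = uHi; pDst->au32[3] = uHi;
}

static void iemSketchMovddup(IEMSKETCHXREG *pDst, uint64_t uSrcLo)
{
    pDst->au64[0] = uSrcLo;
    pDst->au64[1] = uSrcLo;
}
#endif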


/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* Mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
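

/*
 * Illustrative sketch of the control register decoding above: reg plus
 * REX.R yields CR0..CR15, and on CPUs with the fMovCr8In32Bit feature a
 * LOCK prefix doubles as a CR8 encoding for code without REX prefixes;
 * anything other than CR0/2/3/4/8 is #UD.  iemSketchDecodeCrReg is a
 * made-up name; #if 0 keeps the sketch out of the build.
 */
#if 0
#include <stdbool.h>
#include <stdint.h>

static int iemSketchDecodeCrReg(uint8_t bRm, bool fRexR, bool fLock, bool fMovCr8In32Bit)
{
    unsigned iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
    if (fLock)
    {
        if (!fMovCr8In32Bit)
            return -1;          /* #UD: lock form not supported */
        iCrReg |= 8;            /* lock prefix selects CR8 */
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            return (int)iCrReg;
        default:
            return -1;          /* #UD: no such control register */
    }
}
#endif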
1749
1750
1751/** Opcode 0x0f 0x21. */
1752FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1753{
1754 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1755 IEMOP_HLP_MIN_386();
1756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1758 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1759 return IEMOP_RAISE_INVALID_OPCODE();
1760 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1761 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1762 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1763}
1764
1765
1766/** Opcode 0x0f 0x22. */
1767FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1768{
1769 /* mod is ignored, as is operand size overrides. */
1770 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1771 IEMOP_HLP_MIN_386();
1772 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1773 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1774 else
1775 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1776
1777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1778 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1779 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1780 {
1781 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1782 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1783 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1784 iCrReg |= 8;
1785 }
1786 switch (iCrReg)
1787 {
1788 case 0: case 2: case 3: case 4: case 8:
1789 break;
1790 default:
1791 return IEMOP_RAISE_INVALID_OPCODE();
1792 }
1793 IEMOP_HLP_DONE_DECODING();
1794
1795 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1796}
1797
1798
1799/** Opcode 0x0f 0x23. */
1800FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1801{
1802 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1803 IEMOP_HLP_MIN_386();
1804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1806 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1807 return IEMOP_RAISE_INVALID_OPCODE();
1808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1809 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1810 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1811}
1812
1813
1814/** Opcode 0x0f 0x24. */
1815FNIEMOP_DEF(iemOp_mov_Rd_Td)
1816{
1817 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1818 /** @todo works on 386 and 486. */
1819 /* The RM byte is not considered, see testcase. */
1820 return IEMOP_RAISE_INVALID_OPCODE();
1821}
1822
1823
1824/** Opcode 0x0f 0x26. */
1825FNIEMOP_DEF(iemOp_mov_Td_Rd)
1826{
1827 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1828 /** @todo works on 386 and 486. */
1829 /* The RM byte is not considered, see testcase. */
1830 return IEMOP_RAISE_INVALID_OPCODE();
1831}
1832
1833
1834/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
1835FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1836{
1837 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1840 {
1841 /*
1842 * Register, register.
1843 */
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 IEM_MC_BEGIN(0, 0);
1846 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1848 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1849 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1850 IEM_MC_ADVANCE_RIP();
1851 IEM_MC_END();
1852 }
1853 else
1854 {
1855 /*
1856 * Register, memory.
1857 */
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
1877FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1878{
1879 IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
1880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1882 {
1883 /*
1884 * Register, register.
1885 */
1886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1887 IEM_MC_BEGIN(0, 0);
1888 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1889 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1890 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1891 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1892 IEM_MC_ADVANCE_RIP();
1893 IEM_MC_END();
1894 }
1895 else
1896 {
1897 /*
1898 * Register, memory.
1899 */
1900 IEM_MC_BEGIN(0, 2);
1901 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1903
1904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1907 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1908
1909 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1910 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1911
1912 IEM_MC_ADVANCE_RIP();
1913 IEM_MC_END();
1914 }
1915 return VINF_SUCCESS;
1916}
1917
1918/* Opcode 0xf3 0x0f 0x28 - invalid */
1919/* Opcode 0xf2 0x0f 0x28 - invalid */
1920
1921/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
1922FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1923{
1924 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1927 {
1928 /*
1929 * Register, register.
1930 */
1931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1932 IEM_MC_BEGIN(0, 0);
1933 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1935 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1936 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1937 IEM_MC_ADVANCE_RIP();
1938 IEM_MC_END();
1939 }
1940 else
1941 {
1942 /*
1943 * Memory, register.
1944 */
1945 IEM_MC_BEGIN(0, 2);
1946 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1948
1949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1951 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1952 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1953
1954 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1955 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1956
1957 IEM_MC_ADVANCE_RIP();
1958 IEM_MC_END();
1959 }
1960 return VINF_SUCCESS;
1961}
1962
1963/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
1964FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1965{
1966 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1969 {
1970 /*
1971 * Register, register.
1972 */
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 IEM_MC_BEGIN(0, 0);
1975 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1976 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1977 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1978 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1979 IEM_MC_ADVANCE_RIP();
1980 IEM_MC_END();
1981 }
1982 else
1983 {
1984 /*
1985 * Memory, register.
1986 */
1987 IEM_MC_BEGIN(0, 2);
1988 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1990
1991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1993 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1994 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1995
1996 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1997 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1998
1999 IEM_MC_ADVANCE_RIP();
2000 IEM_MC_END();
2001 }
2002 return VINF_SUCCESS;
2003}
2004
2005/* Opcode 0xf3 0x0f 0x29 - invalid */
2006/* Opcode 0xf2 0x0f 0x29 - invalid */
2007
2008
2009/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2010FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2011/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2012FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2013/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2014FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2015/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2016FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2017
2018
2019/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
2020FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2021{
2022 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2024 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2025 {
2026 /*
2027 * memory, register.
2028 */
2029 IEM_MC_BEGIN(0, 2);
2030 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2032
2033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2035 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2036 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2037
2038 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2039 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2040
2041 IEM_MC_ADVANCE_RIP();
2042 IEM_MC_END();
2043 }
2044 /* The register, register encoding is invalid. */
2045 else
2046 return IEMOP_RAISE_INVALID_OPCODE();
2047 return VINF_SUCCESS;
2048}
2049
2050/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
2051FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2052{
2053 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2055 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2056 {
2057 /*
2058 * memory, register.
2059 */
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2068
2069 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 }
2075 /* The register, register encoding is invalid. */
2076 else
2077 return IEMOP_RAISE_INVALID_OPCODE();
2078 return VINF_SUCCESS;
2079}
2080/* Opcode 0xf3 0x0f 0x2b - invalid */
2081/* Opcode 0xf2 0x0f 0x2b - invalid */
2082
2083
2084/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2085FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2086/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2087FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2088/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2089FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2090/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2091FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2092
2093/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2094FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2095 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2096FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2097/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2098FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2099/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2100FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2101
2102/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2103FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2104/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2105FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2106/* Opcode 0xf3 0x0f 0x2e - invalid */
2107/* Opcode 0xf2 0x0f 0x2e - invalid */
2108
2109/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2110FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2111/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2112FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2113/* Opcode 0xf3 0x0f 0x2f - invalid */
2114/* Opcode 0xf2 0x0f 0x2f - invalid */
2115
2116/** Opcode 0x0f 0x30. */
2117FNIEMOP_DEF(iemOp_wrmsr)
2118{
2119 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2121 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2122}
2123
2124
2125/** Opcode 0x0f 0x31. */
2126FNIEMOP_DEF(iemOp_rdtsc)
2127{
2128 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2130 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2131}
2132
2133
2134 /** Opcode 0x0f 0x32. */
2135FNIEMOP_DEF(iemOp_rdmsr)
2136{
2137 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2139 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2140}
2141
2142
2143 /** Opcode 0x0f 0x33. */
2144FNIEMOP_STUB(iemOp_rdpmc);
2145/** Opcode 0x0f 0x34. */
2146FNIEMOP_STUB(iemOp_sysenter);
2147/** Opcode 0x0f 0x35. */
2148FNIEMOP_STUB(iemOp_sysexit);
2149/** Opcode 0x0f 0x37. */
2150FNIEMOP_STUB(iemOp_getsec);
2151/** Opcode 0x0f 0x38. */
2152FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2153/** Opcode 0x0f 0x3a. */
2154FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2155
2156
2157/**
2158 * Implements a conditional move.
2159 *
2160 * Wish there were an obvious way to do this that would let us share code
2161 * and reduce bloat.
2162 *
2163 * @param a_Cnd The conditional "microcode" operation.
2164 */
2165#define CMOV_X(a_Cnd) \
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2168 { \
2169 switch (pVCpu->iem.s.enmEffOpSize) \
2170 { \
2171 case IEMMODE_16BIT: \
2172 IEM_MC_BEGIN(0, 1); \
2173 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2174 a_Cnd { \
2175 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2176 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2177 } IEM_MC_ENDIF(); \
2178 IEM_MC_ADVANCE_RIP(); \
2179 IEM_MC_END(); \
2180 return VINF_SUCCESS; \
2181 \
2182 case IEMMODE_32BIT: \
2183 IEM_MC_BEGIN(0, 1); \
2184 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2185 a_Cnd { \
2186 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2187 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2188 } IEM_MC_ELSE() { \
2189 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2190 } IEM_MC_ENDIF(); \
2191 IEM_MC_ADVANCE_RIP(); \
2192 IEM_MC_END(); \
2193 return VINF_SUCCESS; \
2194 \
2195 case IEMMODE_64BIT: \
2196 IEM_MC_BEGIN(0, 1); \
2197 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2198 a_Cnd { \
2199 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2200 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2201 } IEM_MC_ENDIF(); \
2202 IEM_MC_ADVANCE_RIP(); \
2203 IEM_MC_END(); \
2204 return VINF_SUCCESS; \
2205 \
2206 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2207 } \
2208 } \
2209 else \
2210 { \
2211 switch (pVCpu->iem.s.enmEffOpSize) \
2212 { \
2213 case IEMMODE_16BIT: \
2214 IEM_MC_BEGIN(0, 2); \
2215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2216 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2218 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2219 a_Cnd { \
2220 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2221 } IEM_MC_ENDIF(); \
2222 IEM_MC_ADVANCE_RIP(); \
2223 IEM_MC_END(); \
2224 return VINF_SUCCESS; \
2225 \
2226 case IEMMODE_32BIT: \
2227 IEM_MC_BEGIN(0, 2); \
2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2229 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2231 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2232 a_Cnd { \
2233 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2234 } IEM_MC_ELSE() { \
2235 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2236 } IEM_MC_ENDIF(); \
2237 IEM_MC_ADVANCE_RIP(); \
2238 IEM_MC_END(); \
2239 return VINF_SUCCESS; \
2240 \
2241 case IEMMODE_64BIT: \
2242 IEM_MC_BEGIN(0, 2); \
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2244 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2246 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2247 a_Cnd { \
2248 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2249 } IEM_MC_ENDIF(); \
2250 IEM_MC_ADVANCE_RIP(); \
2251 IEM_MC_END(); \
2252 return VINF_SUCCESS; \
2253 \
2254 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2255 } \
2256 } do {} while (0)
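/*
 * Illustrative sketch, not part of the build: the architectural behaviour the
 * CMOV_X expansions implement, shown for the 32-bit case with a hypothetical
 * helper.  Two details worth noting: the memory operand is always fetched
 * (and may fault) regardless of the condition, and in 64-bit mode the high
 * half of the destination is cleared even when the condition is false.
 */
#if 0
#include <stdbool.h>
#include <stdint.h>

static uint64_t CMovU32Example(uint64_t uDst, uint32_t uSrc, bool fCond)
{
    uint32_t uResult = fCond ? uSrc : (uint32_t)uDst;
    return uResult; /* Implicit zero extension clears bits 63:32. */
}
#endif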
2257
2258
2259
2260/** Opcode 0x0f 0x40. */
2261FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2262{
2263 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2264 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2265}
2266
2267
2268/** Opcode 0x0f 0x41. */
2269FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2270{
2271 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2272 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2273}
2274
2275
2276/** Opcode 0x0f 0x42. */
2277FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2278{
2279 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2280 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2281}
2282
2283
2284/** Opcode 0x0f 0x43. */
2285FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2286{
2287 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2288 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2289}
2290
2291
2292/** Opcode 0x0f 0x44. */
2293FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2294{
2295 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2296 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2297}
2298
2299
2300/** Opcode 0x0f 0x45. */
2301FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2302{
2303 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2304 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2305}
2306
2307
2308/** Opcode 0x0f 0x46. */
2309FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2310{
2311 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2312 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2313}
2314
2315
2316/** Opcode 0x0f 0x47. */
2317FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2318{
2319 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2320 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2321}
2322
2323
2324/** Opcode 0x0f 0x48. */
2325FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2326{
2327 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2328 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2329}
2330
2331
2332/** Opcode 0x0f 0x49. */
2333FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2334{
2335 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2336 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2337}
2338
2339
2340/** Opcode 0x0f 0x4a. */
2341FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2342{
2343 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2344 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2345}
2346
2347
2348/** Opcode 0x0f 0x4b. */
2349FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2350{
2351 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2352 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2353}
2354
2355
2356/** Opcode 0x0f 0x4c. */
2357FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2358{
2359 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2360 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2361}
2362
2363
2364/** Opcode 0x0f 0x4d. */
2365FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2366{
2367 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2368 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2369}
2370
2371
2372/** Opcode 0x0f 0x4e. */
2373FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2374{
2375 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2376 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2377}
2378
2379
2380/** Opcode 0x0f 0x4f. */
2381FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2382{
2383 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2384 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2385}
2386
2387#undef CMOV_X
2388
2389/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2390FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2391/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2392FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2393/* Opcode 0xf3 0x0f 0x50 - invalid */
2394/* Opcode 0xf2 0x0f 0x50 - invalid */
2395
2396/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2397FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2398/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2399FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2400/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2401FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2402/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2403FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2404
2405/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2406FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2407/* Opcode 0x66 0x0f 0x52 - invalid */
2408/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2409FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2410/* Opcode 0xf2 0x0f 0x52 - invalid */
2411
2412/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2413FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2414/* Opcode 0x66 0x0f 0x53 - invalid */
2415/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2416FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2417/* Opcode 0xf2 0x0f 0x53 - invalid */
2418
2419/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2420FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2421/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2422FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2423/* Opcode 0xf3 0x0f 0x54 - invalid */
2424/* Opcode 0xf2 0x0f 0x54 - invalid */
2425
2426/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2427FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2428/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2429FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2430/* Opcode 0xf3 0x0f 0x55 - invalid */
2431/* Opcode 0xf2 0x0f 0x55 - invalid */
2432
2433/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2434FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2435/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2436FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2437/* Opcode 0xf3 0x0f 0x56 - invalid */
2438/* Opcode 0xf2 0x0f 0x56 - invalid */
2439
2440/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2441FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2442/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2443FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2444/* Opcode 0xf3 0x0f 0x57 - invalid */
2445/* Opcode 0xf2 0x0f 0x57 - invalid */
2446
2447/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2448FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2449/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2450FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2451/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2452FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2453/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2454FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2455
2456/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2457FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2458/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2459FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2460/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2461FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2462/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2463FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2464
2465/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2466FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2467/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2468FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2469/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2470FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2471/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2472FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2473
2474/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2475FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2476/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2477FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2478/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2479FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2480/* Opcode 0xf2 0x0f 0x5b - invalid */
2481
2482/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2483FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2484/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2485FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2486/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2487FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2488/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2489FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2490
2491/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2492FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2493/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2494FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2495/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2496FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2497/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2498FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2499
2500/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2501FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2502/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2503FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2504/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2505FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2506/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2507FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2508
2509/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2510FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2511/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2512FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2513/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2514FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2515/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2516FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2517
2518/**
2519 * Common worker for SSE2 instructions on the forms:
2520 *     pxxxx xmm1, xmm2/mem128
2521 *
2522 * The 2nd operand is the low half of a register, which in the memory case
2523 * means a 64-bit fetch (IEM_MC_FETCH_MEM_U64_ALIGN_U128) that is checked
2524 * for 128-bit alignment.
2525 *
2526 * Exceptions type 4.
2527 */
2528 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2529{
2530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2532 {
2533 /*
2534 * Register, register.
2535 */
2536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2537 IEM_MC_BEGIN(2, 0);
2538 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2539 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2540 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2541 IEM_MC_PREPARE_SSE_USAGE();
2542 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2543 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2544 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2545 IEM_MC_ADVANCE_RIP();
2546 IEM_MC_END();
2547 }
2548 else
2549 {
2550 /*
2551 * Register, memory.
2552 */
2553 IEM_MC_BEGIN(2, 2);
2554 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2555 IEM_MC_LOCAL(uint64_t, uSrc);
2556 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2558
2559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2562 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2563
2564 IEM_MC_PREPARE_SSE_USAGE();
2565 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2566 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2567
2568 IEM_MC_ADVANCE_RIP();
2569 IEM_MC_END();
2570 }
2571 return VINF_SUCCESS;
2572}
2573
2574
2575/**
2576 * Common worker for MMX instructions on the forms:
2577 *     pxxxx mm1, mm2/mem32
2578 *
2579 * The 2nd operand is the low half of a register, which in the memory case
2580 * means a 32-bit memory access.  Instructions without an MMX form (NULL
2581 * pfnU64) raise #UD, see the check below.
2582 *
2583 * Exceptions type 4.
2584 */
2585 FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2586{
2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2588 if (!pImpl->pfnU64)
2589 return IEMOP_RAISE_INVALID_OPCODE();
2590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2591 {
2592 /*
2593 * Register, register.
2594 */
2595 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2596 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598 IEM_MC_BEGIN(2, 0);
2599 IEM_MC_ARG(uint64_t *, pDst, 0);
2600 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2601 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2602 IEM_MC_PREPARE_FPU_USAGE();
2603 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2604 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2605 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2606 IEM_MC_ADVANCE_RIP();
2607 IEM_MC_END();
2608 }
2609 else
2610 {
2611 /*
2612 * Register, memory.
2613 */
2614 IEM_MC_BEGIN(2, 2);
2615 IEM_MC_ARG(uint64_t *, pDst, 0);
2616 IEM_MC_LOCAL(uint32_t, uSrc);
2617 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2619
2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2622 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2623 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2624
2625 IEM_MC_PREPARE_FPU_USAGE();
2626 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2627 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2628
2629 IEM_MC_ADVANCE_RIP();
2630 IEM_MC_END();
2631 }
2632 return VINF_SUCCESS;
2633}
2634
2635
2636/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2637FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2638{
2639 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2640 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2641}
2642
2643 /** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2644FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2645{
2646 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2647 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2648}
2649
2650/* Opcode 0xf3 0x0f 0x60 - invalid */
2651
2652
2653/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2654FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2655{
2656 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2657 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2658}
2659
2660/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2661FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2662{
2663 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2664 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2665}
2666
2667/* Opcode 0xf3 0x0f 0x61 - invalid */
2668
2669
2670/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2671FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2672{
2673 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2674 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2675}
2676
2677/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2678FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2679{
2680 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2681 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2682}
2683
2684/* Opcode 0xf3 0x0f 0x62 - invalid */
2685
2686
2687
2688/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2689FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2690/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2691FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2692/* Opcode 0xf3 0x0f 0x63 - invalid */
2693
2694/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2695FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2696/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2697FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2698/* Opcode 0xf3 0x0f 0x64 - invalid */
2699
2700/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2701FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2702/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2703FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2704/* Opcode 0xf3 0x0f 0x65 - invalid */
2705
2706/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2707FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2708/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2709FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2710/* Opcode 0xf3 0x0f 0x66 - invalid */
2711
2712/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2713FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2714/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2715FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2716/* Opcode 0xf3 0x0f 0x67 - invalid */
2717
2718
2719/**
2720 * Common worker for MMX instructions on the form:
2721 * pxxxx mm1, mm2/mem64
2722 *
2723 * The 2nd operand is the high half of a register, which in the memory
2724 * case means a plain 64-bit memory access with no special alignment
2725 * requirement.
2726 *
2727 * Exceptions type 4.
2728 */
2729FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2730{
2731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2732 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2734 {
2735 /*
2736 * Register, register.
2737 */
2738 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2739 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2741 IEM_MC_BEGIN(2, 0);
2742 IEM_MC_ARG(uint64_t *, pDst, 0);
2743 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2744 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2745 IEM_MC_PREPARE_FPU_USAGE();
2746 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2747 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2748 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2749 IEM_MC_ADVANCE_RIP();
2750 IEM_MC_END();
2751 }
2752 else
2753 {
2754 /*
2755 * Register, memory.
2756 */
2757 IEM_MC_BEGIN(2, 2);
2758 IEM_MC_ARG(uint64_t *, pDst, 0);
2759 IEM_MC_LOCAL(uint64_t, uSrc);
2760 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2762
2763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2765 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2766 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2767
2768 IEM_MC_PREPARE_FPU_USAGE();
2769 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2770 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2771
2772 IEM_MC_ADVANCE_RIP();
2773 IEM_MC_END();
2774 }
2775 return VINF_SUCCESS;
2776}
2777
2778
2779/**
2780 * Common worker for SSE2 instructions on the form:
2781 * pxxxx xmm1, xmm2/mem128
2782 *
2783 * The 2nd operand is the high half of a register, which in the memory case
2784 * means a 128-bit aligned access; the implementation may read the full
2785 * 128 bits or only the upper 64 bits.
2786 *
2787 * Exceptions type 4.
2788 */
2789FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2790{
2791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2793 {
2794 /*
2795 * Register, register.
2796 */
2797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2798 IEM_MC_BEGIN(2, 0);
2799 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2800 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2801 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2802 IEM_MC_PREPARE_SSE_USAGE();
2803 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2804 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2805 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2806 IEM_MC_ADVANCE_RIP();
2807 IEM_MC_END();
2808 }
2809 else
2810 {
2811 /*
2812 * Register, memory.
2813 */
2814 IEM_MC_BEGIN(2, 2);
2815 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2816 IEM_MC_LOCAL(RTUINT128U, uSrc);
2817 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2819
2820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2823 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2824
2825 IEM_MC_PREPARE_SSE_USAGE();
2826 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2827 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2828
2829 IEM_MC_ADVANCE_RIP();
2830 IEM_MC_END();
2831 }
2832 return VINF_SUCCESS;
2833}
2834
2835
2836/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2837FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2838{
2839 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2840 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2841}
2842
2843/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2844FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2845{
2846 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2847 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2848}
2849/* Opcode 0xf3 0x0f 0x68 - invalid */
2850
2851
2852/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2853FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2854{
2855 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2856 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2857}
2858
2859/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2860FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2861{
2862 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2863 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2865}
2866/* Opcode 0xf3 0x0f 0x69 - invalid */
2867
2868
2869/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2870FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2871{
2872 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2873 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2874}
2875
2876/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2877FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2878{
2879 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2880 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2881}
2882/* Opcode 0xf3 0x0f 0x6a - invalid */
2883
2884
2885/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2886FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2887/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2888FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2889/* Opcode 0xf3 0x0f 0x6b - invalid */
2890
2891
2892/* Opcode 0x0f 0x6c - invalid */
2893
2894/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2895FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2896{
2897 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2898 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2899}
2900
2901/* Opcode 0xf3 0x0f 0x6c - invalid */
2902/* Opcode 0xf2 0x0f 0x6c - invalid */
2903
2904
2905/* Opcode 0x0f 0x6d - invalid */
2906
2907/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2908FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2909{
2910 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2911 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2912}
2913
2914/* Opcode 0xf3 0x0f 0x6d - invalid */
2915
2916
2917/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2918FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2919{
2920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2921 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2922 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2923 else
2924 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2926 {
2927 /* MMX, greg */
2928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2929 IEM_MC_BEGIN(0, 1);
2930 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2931 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2932 IEM_MC_LOCAL(uint64_t, u64Tmp);
2933 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2934 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2935 else
2936 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2937 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2938 IEM_MC_ADVANCE_RIP();
2939 IEM_MC_END();
2940 }
2941 else
2942 {
2943 /* MMX, [mem] */
2944 IEM_MC_BEGIN(0, 2);
2945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2946 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows */
2948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2949 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2950 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2951 {
2952 IEM_MC_LOCAL(uint64_t, u64Tmp);
2953 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2954 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2955 }
2956 else
2957 {
2958 IEM_MC_LOCAL(uint32_t, u32Tmp);
2959 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2960 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2961 }
2962 IEM_MC_ADVANCE_RIP();
2963 IEM_MC_END();
2964 }
2965 return VINF_SUCCESS;
2966}
2967
2968/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2969FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2970{
2971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2972 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2973 IEMOP_MNEMONIC(vmovdq_Vq_Eq, "vmovq Vq,Eq");
2974 else
2975 IEMOP_MNEMONIC(vmovdq_Vd_Ed, "vmovd Vd,Ed");
2976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2977 {
2978 /* XMM, greg*/
2979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2980 IEM_MC_BEGIN(0, 1);
2981 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2982 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2983 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2984 {
2985 IEM_MC_LOCAL(uint64_t, u64Tmp);
2986 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2987 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2988 }
2989 else
2990 {
2991 IEM_MC_LOCAL(uint32_t, u32Tmp);
2992 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2993 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2994 }
2995 IEM_MC_ADVANCE_RIP();
2996 IEM_MC_END();
2997 }
2998 else
2999 {
3000 /* XMM, [mem] */
3001 IEM_MC_BEGIN(0, 2);
3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3003 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows */
3005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3007 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3008 {
3009 IEM_MC_LOCAL(uint64_t, u64Tmp);
3010 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3011 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3012 }
3013 else
3014 {
3015 IEM_MC_LOCAL(uint32_t, u32Tmp);
3016 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3017 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3018 }
3019 IEM_MC_ADVANCE_RIP();
3020 IEM_MC_END();
3021 }
3022 return VINF_SUCCESS;
3023}
3024
3025/* Opcode 0xf3 0x0f 0x6e - invalid */
3026
3027
3028/** Opcode 0x0f 0x6f - movq Pq, Qq */
3029FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3030{
3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3032 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3033 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3034 {
3035 /*
3036 * Register, register.
3037 */
3038 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3039 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3041 IEM_MC_BEGIN(0, 1);
3042 IEM_MC_LOCAL(uint64_t, u64Tmp);
3043 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3044 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3045 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3046 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3047 IEM_MC_ADVANCE_RIP();
3048 IEM_MC_END();
3049 }
3050 else
3051 {
3052 /*
3053 * Register, memory.
3054 */
3055 IEM_MC_BEGIN(0, 2);
3056 IEM_MC_LOCAL(uint64_t, u64Tmp);
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3058
3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3062 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3063 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3064 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3065
3066 IEM_MC_ADVANCE_RIP();
3067 IEM_MC_END();
3068 }
3069 return VINF_SUCCESS;
3070}
3071
3072/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
3073FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3074{
3075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3076 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3078 {
3079 /*
3080 * Register, register.
3081 */
3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083 IEM_MC_BEGIN(0, 0);
3084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3086 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3087 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3088 IEM_MC_ADVANCE_RIP();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /*
3094 * Register, memory.
3095 */
3096 IEM_MC_BEGIN(0, 2);
3097 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3102 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3104 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3105 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3106
3107 IEM_MC_ADVANCE_RIP();
3108 IEM_MC_END();
3109 }
3110 return VINF_SUCCESS;
3111}
3112
3113/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3114FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3115{
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3119 {
3120 /*
3121 * Register, register.
3122 */
3123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3124 IEM_MC_BEGIN(0, 0);
3125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3127 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3128 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3129 IEM_MC_ADVANCE_RIP();
3130 IEM_MC_END();
3131 }
3132 else
3133 {
3134 /*
3135 * Register, memory.
3136 */
3137 IEM_MC_BEGIN(0, 2);
3138 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3140
3141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3143 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3144 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3145 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3146 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3147
3148 IEM_MC_ADVANCE_RIP();
3149 IEM_MC_END();
3150 }
3151 return VINF_SUCCESS;
3152}
3153
3154
3155/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3156FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3157{
3158 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3161 {
3162 /*
3163 * Register, register.
3164 */
3165 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3167
3168 IEM_MC_BEGIN(3, 0);
3169 IEM_MC_ARG(uint64_t *, pDst, 0);
3170 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3171 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3172 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3173 IEM_MC_PREPARE_FPU_USAGE();
3174 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3175 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3176 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 else
3181 {
3182 /*
3183 * Register, memory.
3184 */
3185 IEM_MC_BEGIN(3, 2);
3186 IEM_MC_ARG(uint64_t *, pDst, 0);
3187 IEM_MC_LOCAL(uint64_t, uSrc);
3188 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3190
3191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib byte follows */
3192 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3193 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3196
3197 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3198 IEM_MC_PREPARE_FPU_USAGE();
3199 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3200 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206}
3207
3208/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3209FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3210{
3211 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3214 {
3215 /*
3216 * Register, register.
3217 */
3218 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3220
3221 IEM_MC_BEGIN(3, 0);
3222 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3223 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3224 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3226 IEM_MC_PREPARE_SSE_USAGE();
3227 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3228 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3229 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3230 IEM_MC_ADVANCE_RIP();
3231 IEM_MC_END();
3232 }
3233 else
3234 {
3235 /*
3236 * Register, memory.
3237 */
3238 IEM_MC_BEGIN(3, 2);
3239 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3240 IEM_MC_LOCAL(RTUINT128U, uSrc);
3241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3243
3244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib byte follows */
3245 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3246 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3249
3250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3251 IEM_MC_PREPARE_SSE_USAGE();
3252 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3253 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3254
3255 IEM_MC_ADVANCE_RIP();
3256 IEM_MC_END();
3257 }
3258 return VINF_SUCCESS;
3259}
3260
3261/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3262FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3263{
3264 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3267 {
3268 /*
3269 * Register, register.
3270 */
3271 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3273
3274 IEM_MC_BEGIN(3, 0);
3275 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3276 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3277 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3278 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3279 IEM_MC_PREPARE_SSE_USAGE();
3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3281 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3282 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3283 IEM_MC_ADVANCE_RIP();
3284 IEM_MC_END();
3285 }
3286 else
3287 {
3288 /*
3289 * Register, memory.
3290 */
3291 IEM_MC_BEGIN(3, 2);
3292 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3293 IEM_MC_LOCAL(RTUINT128U, uSrc);
3294 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3296
3297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib byte follows */
3298 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3299 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3302
3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3304 IEM_MC_PREPARE_SSE_USAGE();
3305 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3306 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3307
3308 IEM_MC_ADVANCE_RIP();
3309 IEM_MC_END();
3310 }
3311 return VINF_SUCCESS;
3312}
3313
3314/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3315FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3316{
3317 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3320 {
3321 /*
3322 * Register, register.
3323 */
3324 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3326
3327 IEM_MC_BEGIN(3, 0);
3328 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3329 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3330 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3331 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3332 IEM_MC_PREPARE_SSE_USAGE();
3333 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3334 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3335 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3336 IEM_MC_ADVANCE_RIP();
3337 IEM_MC_END();
3338 }
3339 else
3340 {
3341 /*
3342 * Register, memory.
3343 */
3344 IEM_MC_BEGIN(3, 2);
3345 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3346 IEM_MC_LOCAL(RTUINT128U, uSrc);
3347 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3349
3350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib byte follows */
3351 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3352 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3354 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3355
3356 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3357 IEM_MC_PREPARE_SSE_USAGE();
3358 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3359 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3360
3361 IEM_MC_ADVANCE_RIP();
3362 IEM_MC_END();
3363 }
3364 return VINF_SUCCESS;
3365}
3366
3367
3368/** Opcode 0x0f 0x71 11/2. */
3369FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3370
3371/** Opcode 0x66 0x0f 0x71 11/2. */
3372FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3373
3374/** Opcode 0x0f 0x71 11/4. */
3375FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3376
3377/** Opcode 0x66 0x0f 0x71 11/4. */
3378FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3379
3380/** Opcode 0x0f 0x71 11/6. */
3381FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3382
3383/** Opcode 0x66 0x0f 0x71 11/6. */
3384FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3385
3386
3387/**
3388 * Group 12 jump table for register variant.
3389 */
3390IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3391{
3392 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3393 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3394 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3395 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3396 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3397 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3398 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3399 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3400};
3401AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3402
3403
3404/** Opcode 0x0f 0x71. */
3405FNIEMOP_DEF(iemOp_Grp12)
3406{
3407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3408 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3409 /* register, register */
3410 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3411 + pVCpu->iem.s.idxPrefix], bRm);
3412 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3413}
3414
3415
3416/** Opcode 0x0f 0x72 11/2. */
3417FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3418
3419/** Opcode 0x66 0x0f 0x72 11/2. */
3420FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3421
3422/** Opcode 0x0f 0x72 11/4. */
3423FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3424
3425/** Opcode 0x66 0x0f 0x72 11/4. */
3426FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3427
3428/** Opcode 0x0f 0x72 11/6. */
3429FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3430
3431/** Opcode 0x66 0x0f 0x72 11/6. */
3432FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3433
3434
3435/**
3436 * Group 13 jump table for register variant.
3437 */
3438IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3439{
3440 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3441 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3442 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3443 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3444 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3445 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3446 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3447 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3448};
3449AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3450
3451/** Opcode 0x0f 0x72. */
3452FNIEMOP_DEF(iemOp_Grp13)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 /* register, register */
3457 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3458 + pVCpu->iem.s.idxPrefix], bRm);
3459 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3460}
3461
3462
3463/** Opcode 0x0f 0x73 11/2. */
3464FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3465
3466/** Opcode 0x66 0x0f 0x73 11/2. */
3467FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3468
3469/** Opcode 0x66 0x0f 0x73 11/3. */
3470FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3471
3472/** Opcode 0x0f 0x73 11/6. */
3473FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3474
3475/** Opcode 0x66 0x0f 0x73 11/6. */
3476FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3477
3478/** Opcode 0x66 0x0f 0x73 11/7. */
3479FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3480
3481/**
3482 * Group 14 jump table for register variant.
3483 */
3484IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3485{
3486 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3487 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3488 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3489 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3490 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3491 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3492 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3493 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3494};
3495AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3496
3497
3498/** Opcode 0x0f 0x73. */
3499FNIEMOP_DEF(iemOp_Grp14)
3500{
3501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3502 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3503 /* register, register */
3504 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3505 + pVCpu->iem.s.idxPrefix], bRm);
3506 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3507}
3508
3509
3510/**
3511 * Common worker for MMX instructions on the form:
3512 * pxxx mm1, mm2/mem64
3513 */
3514FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3515{
3516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3518 {
3519 /*
3520 * Register, register.
3521 */
3522 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3523 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3525 IEM_MC_BEGIN(2, 0);
3526 IEM_MC_ARG(uint64_t *, pDst, 0);
3527 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3528 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3529 IEM_MC_PREPARE_FPU_USAGE();
3530 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3531 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3532 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3533 IEM_MC_ADVANCE_RIP();
3534 IEM_MC_END();
3535 }
3536 else
3537 {
3538 /*
3539 * Register, memory.
3540 */
3541 IEM_MC_BEGIN(2, 2);
3542 IEM_MC_ARG(uint64_t *, pDst, 0);
3543 IEM_MC_LOCAL(uint64_t, uSrc);
3544 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3546
3547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3550 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3551
3552 IEM_MC_PREPARE_FPU_USAGE();
3553 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3554 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3555
3556 IEM_MC_ADVANCE_RIP();
3557 IEM_MC_END();
3558 }
3559 return VINF_SUCCESS;
3560}
3561
3562
3563/**
3564 * Common worker for SSE2 instructions on the form:
3565 * pxxx xmm1, xmm2/mem128
3566 *
3567 * Proper alignment of the 128-bit operand is enforced.
3568 * Exceptions type 4. SSE2 cpuid checks.
3569 */
3570FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3571{
3572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3573 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3574 {
3575 /*
3576 * Register, register.
3577 */
3578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3579 IEM_MC_BEGIN(2, 0);
3580 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3581 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3582 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3583 IEM_MC_PREPARE_SSE_USAGE();
3584 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3585 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3586 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3587 IEM_MC_ADVANCE_RIP();
3588 IEM_MC_END();
3589 }
3590 else
3591 {
3592 /*
3593 * Register, memory.
3594 */
3595 IEM_MC_BEGIN(2, 2);
3596 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3597 IEM_MC_LOCAL(RTUINT128U, uSrc);
3598 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3600
3601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
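/* Exception type 4: the aligned fetch below raises #GP(0) if the 16-byte
   memory operand is not 16-byte aligned. */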
3604 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3605
3606 IEM_MC_PREPARE_SSE_USAGE();
3607 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3608 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3609
3610 IEM_MC_ADVANCE_RIP();
3611 IEM_MC_END();
3612 }
3613 return VINF_SUCCESS;
3614}
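/* Both workers above dispatch through a PCIEMOPMEDIAF2.  Roughly sketched
   (only the pfnU64/pfnU128 members are visible here; the typedef names are
   assumptions), such a table entry bundles the two assembly-level
   implementations of one instruction:
       typedef struct IEMOPMEDIAF2
       {
           PFNIEMAIMPLMEDIAF2U64   pfnU64;     // MMX, 64-bit operands
           PFNIEMAIMPLMEDIAF2U128  pfnU128;    // SSE, 128-bit operands
       } IEMOPMEDIAF2;
   so a table entry like g_iemAImpl_pcmpeqb below supplies both the MMX and
   the SSE2 flavour of PCMPEQB to the respective worker. */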
3615
3616
3617/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3618FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3619{
3620 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3621 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3622}
3623
3624/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3625FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3626{
3627 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3628 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3629}
3630
3631/* Opcode 0xf3 0x0f 0x74 - invalid */
3632/* Opcode 0xf2 0x0f 0x74 - invalid */
3633
3634
3635/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3636FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3637{
3638 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3639 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3640}
3641
3642/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3643FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3644{
3645 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3646 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3647}
3648
3649/* Opcode 0xf3 0x0f 0x75 - invalid */
3650/* Opcode 0xf2 0x0f 0x75 - invalid */
3651
3652
3653/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3654FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3655{
3656 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3657 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3658}
3659
3660/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3661FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3662{
3663 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3664 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3665}
3666
3667/* Opcode 0xf3 0x0f 0x76 - invalid */
3668/* Opcode 0xf2 0x0f 0x76 - invalid */
3669
3670
3671/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3672FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3673/* Opcode 0x66 0x0f 0x77 - invalid */
3674/* Opcode 0xf3 0x0f 0x77 - invalid */
3675/* Opcode 0xf2 0x0f 0x77 - invalid */
3676
3677/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3678FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3679/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3680FNIEMOP_STUB(iemOp_AmdGrp17);
3681/* Opcode 0xf3 0x0f 0x78 - invalid */
3682/* Opcode 0xf2 0x0f 0x78 - invalid */
3683
3684/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3685FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3686/* Opcode 0x66 0x0f 0x79 - invalid */
3687/* Opcode 0xf3 0x0f 0x79 - invalid */
3688/* Opcode 0xf2 0x0f 0x79 - invalid */
3689
3690/* Opcode 0x0f 0x7a - invalid */
3691/* Opcode 0x66 0x0f 0x7a - invalid */
3692/* Opcode 0xf3 0x0f 0x7a - invalid */
3693/* Opcode 0xf2 0x0f 0x7a - invalid */
3694
3695/* Opcode 0x0f 0x7b - invalid */
3696/* Opcode 0x66 0x0f 0x7b - invalid */
3697/* Opcode 0xf3 0x0f 0x7b - invalid */
3698/* Opcode 0xf2 0x0f 0x7b - invalid */
3699
3700/* Opcode 0x0f 0x7c - invalid */
3701/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3702FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3703/* Opcode 0xf3 0x0f 0x7c - invalid */
3704/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3705FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3706
3707/* Opcode 0x0f 0x7d - invalid */
3708/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3709FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3710/* Opcode 0xf3 0x0f 0x7d - invalid */
3711/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3712FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3713
3714
3715/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3716FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3717{
3718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3720 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3721 else
3722 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3724 {
3725 /* greg, MMX */
3726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3727 IEM_MC_BEGIN(0, 1);
3728 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3731 {
3732 IEM_MC_LOCAL(uint64_t, u64Tmp);
3733 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3734 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3735 }
3736 else
3737 {
3738 IEM_MC_LOCAL(uint32_t, u32Tmp);
3739 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3740 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3741 }
3742 IEM_MC_ADVANCE_RIP();
3743 IEM_MC_END();
3744 }
3745 else
3746 {
3747 /* [mem], MMX */
3748 IEM_MC_BEGIN(0, 2);
3749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3752 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3753 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3754 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3755 {
3756 IEM_MC_LOCAL(uint64_t, u64Tmp);
3757 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3758 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3759 }
3760 else
3761 {
3762 IEM_MC_LOCAL(uint32_t, u32Tmp);
3763 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3764 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3765 }
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 }
3769 return VINF_SUCCESS;
3770}
3771
3772/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3773FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3774{
3775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3776 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3777 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3778 else
3779 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3781 {
3782 /* greg, XMM */
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784 IEM_MC_BEGIN(0, 1);
3785 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3786 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3787 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3788 {
3789 IEM_MC_LOCAL(uint64_t, u64Tmp);
3790 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3791 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3792 }
3793 else
3794 {
3795 IEM_MC_LOCAL(uint32_t, u32Tmp);
3796 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3797 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3798 }
3799 IEM_MC_ADVANCE_RIP();
3800 IEM_MC_END();
3801 }
3802 else
3803 {
3804 /* [mem], XMM */
3805 IEM_MC_BEGIN(0, 2);
3806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3811 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3812 {
3813 IEM_MC_LOCAL(uint64_t, u64Tmp);
3814 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3815 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3816 }
3817 else
3818 {
3819 IEM_MC_LOCAL(uint32_t, u32Tmp);
3820 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3821 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3822 }
3823 IEM_MC_ADVANCE_RIP();
3824 IEM_MC_END();
3825 }
3826 return VINF_SUCCESS;
3827}
3828
3829/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3830FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3831/* Opcode 0xf2 0x0f 0x7e - invalid */
3832
3833
3834/** Opcode 0x0f 0x7f - movq Qq, Pq */
3835FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3836{
3837 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3840 {
3841 /*
3842 * Register, register.
3843 */
3844 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3845 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_BEGIN(0, 1);
3848 IEM_MC_LOCAL(uint64_t, u64Tmp);
3849 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3850 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3851 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3852 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3853 IEM_MC_ADVANCE_RIP();
3854 IEM_MC_END();
3855 }
3856 else
3857 {
3858 /*
3859 * Register, memory.
3860 */
3861 IEM_MC_BEGIN(0, 2);
3862 IEM_MC_LOCAL(uint64_t, u64Tmp);
3863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3864
3865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3869
3870 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3871 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3872
3873 IEM_MC_ADVANCE_RIP();
3874 IEM_MC_END();
3875 }
3876 return VINF_SUCCESS;
3877}
3878
3879/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3880FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3881{
3882 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3885 {
3886 /*
3887 * Register, register.
3888 */
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890 IEM_MC_BEGIN(0, 0);
3891 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3892 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3893 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3894 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3895 IEM_MC_ADVANCE_RIP();
3896 IEM_MC_END();
3897 }
3898 else
3899 {
3900 /*
3901 * Register, memory.
3902 */
3903 IEM_MC_BEGIN(0, 2);
3904 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906
3907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3909 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3910 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3911
3912 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3913 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3914
3915 IEM_MC_ADVANCE_RIP();
3916 IEM_MC_END();
3917 }
3918 return VINF_SUCCESS;
3919}
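/* Note: the 0xf3 variant below (vmovdqu) is identical to this function except
   that the memory store uses the unaligned IEM_MC_STORE_MEM_U128, i.e. no
   alignment check on the destination address. */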
3920
3921/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3922FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3923{
3924 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3927 {
3928 /*
3929 * Register, register.
3930 */
3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3932 IEM_MC_BEGIN(0, 0);
3933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3935 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3936 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3937 IEM_MC_ADVANCE_RIP();
3938 IEM_MC_END();
3939 }
3940 else
3941 {
3942 /*
3943 * Register, memory.
3944 */
3945 IEM_MC_BEGIN(0, 2);
3946 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3948
3949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3951 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3952 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3953
3954 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3955 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3956
3957 IEM_MC_ADVANCE_RIP();
3958 IEM_MC_END();
3959 }
3960 return VINF_SUCCESS;
3961}
3962
3963/* Opcode 0xf2 0x0f 0x7f - invalid */
3964
3965
3966
3967/** Opcode 0x0f 0x80. */
3968FNIEMOP_DEF(iemOp_jo_Jv)
3969{
3970 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3971 IEMOP_HLP_MIN_386();
3972 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3973 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3974 {
3975 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977
3978 IEM_MC_BEGIN(0, 0);
3979 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3980 IEM_MC_REL_JMP_S16(i16Imm);
3981 } IEM_MC_ELSE() {
3982 IEM_MC_ADVANCE_RIP();
3983 } IEM_MC_ENDIF();
3984 IEM_MC_END();
3985 }
3986 else
3987 {
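/* Note: in 64-bit mode IEMOP_HLP_DEFAULT_64BIT_OP_SIZE forces the effective
   operand size to 64 bits, so that mode also lands in this branch: Jv still
   encodes only a 32-bit displacement, which the relative jump below
   sign-extends to RIP.  The same pattern is used by all the Jcc handlers
   that follow. */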
3988 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3990
3991 IEM_MC_BEGIN(0, 0);
3992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3993 IEM_MC_REL_JMP_S32(i32Imm);
3994 } IEM_MC_ELSE() {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ENDIF();
3997 IEM_MC_END();
3998 }
3999 return VINF_SUCCESS;
4000}
4001
4002
4003/** Opcode 0x0f 0x81. */
4004FNIEMOP_DEF(iemOp_jno_Jv)
4005{
4006 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4007 IEMOP_HLP_MIN_386();
4008 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4009 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4010 {
4011 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4013
4014 IEM_MC_BEGIN(0, 0);
4015 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4016 IEM_MC_ADVANCE_RIP();
4017 } IEM_MC_ELSE() {
4018 IEM_MC_REL_JMP_S16(i16Imm);
4019 } IEM_MC_ENDIF();
4020 IEM_MC_END();
4021 }
4022 else
4023 {
4024 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4026
4027 IEM_MC_BEGIN(0, 0);
4028 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4029 IEM_MC_ADVANCE_RIP();
4030 } IEM_MC_ELSE() {
4031 IEM_MC_REL_JMP_S32(i32Imm);
4032 } IEM_MC_ENDIF();
4033 IEM_MC_END();
4034 }
4035 return VINF_SUCCESS;
4036}
4037
4038
4039/** Opcode 0x0f 0x82. */
4040FNIEMOP_DEF(iemOp_jc_Jv)
4041{
4042 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4043 IEMOP_HLP_MIN_386();
4044 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4045 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4046 {
4047 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4049
4050 IEM_MC_BEGIN(0, 0);
4051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4052 IEM_MC_REL_JMP_S16(i16Imm);
4053 } IEM_MC_ELSE() {
4054 IEM_MC_ADVANCE_RIP();
4055 } IEM_MC_ENDIF();
4056 IEM_MC_END();
4057 }
4058 else
4059 {
4060 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062
4063 IEM_MC_BEGIN(0, 0);
4064 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4065 IEM_MC_REL_JMP_S32(i32Imm);
4066 } IEM_MC_ELSE() {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ENDIF();
4069 IEM_MC_END();
4070 }
4071 return VINF_SUCCESS;
4072}
4073
4074
4075/** Opcode 0x0f 0x83. */
4076FNIEMOP_DEF(iemOp_jnc_Jv)
4077{
4078 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4079 IEMOP_HLP_MIN_386();
4080 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4081 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4082 {
4083 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4085
4086 IEM_MC_BEGIN(0, 0);
4087 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4088 IEM_MC_ADVANCE_RIP();
4089 } IEM_MC_ELSE() {
4090 IEM_MC_REL_JMP_S16(i16Imm);
4091 } IEM_MC_ENDIF();
4092 IEM_MC_END();
4093 }
4094 else
4095 {
4096 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4098
4099 IEM_MC_BEGIN(0, 0);
4100 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4101 IEM_MC_ADVANCE_RIP();
4102 } IEM_MC_ELSE() {
4103 IEM_MC_REL_JMP_S32(i32Imm);
4104 } IEM_MC_ENDIF();
4105 IEM_MC_END();
4106 }
4107 return VINF_SUCCESS;
4108}
4109
4110
4111/** Opcode 0x0f 0x84. */
4112FNIEMOP_DEF(iemOp_je_Jv)
4113{
4114 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4115 IEMOP_HLP_MIN_386();
4116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4117 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4118 {
4119 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4121
4122 IEM_MC_BEGIN(0, 0);
4123 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4124 IEM_MC_REL_JMP_S16(i16Imm);
4125 } IEM_MC_ELSE() {
4126 IEM_MC_ADVANCE_RIP();
4127 } IEM_MC_ENDIF();
4128 IEM_MC_END();
4129 }
4130 else
4131 {
4132 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4134
4135 IEM_MC_BEGIN(0, 0);
4136 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4137 IEM_MC_REL_JMP_S32(i32Imm);
4138 } IEM_MC_ELSE() {
4139 IEM_MC_ADVANCE_RIP();
4140 } IEM_MC_ENDIF();
4141 IEM_MC_END();
4142 }
4143 return VINF_SUCCESS;
4144}
4145
4146
4147/** Opcode 0x0f 0x85. */
4148FNIEMOP_DEF(iemOp_jne_Jv)
4149{
4150 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4151 IEMOP_HLP_MIN_386();
4152 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4153 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4154 {
4155 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4157
4158 IEM_MC_BEGIN(0, 0);
4159 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4160 IEM_MC_ADVANCE_RIP();
4161 } IEM_MC_ELSE() {
4162 IEM_MC_REL_JMP_S16(i16Imm);
4163 } IEM_MC_ENDIF();
4164 IEM_MC_END();
4165 }
4166 else
4167 {
4168 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4170
4171 IEM_MC_BEGIN(0, 0);
4172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4173 IEM_MC_ADVANCE_RIP();
4174 } IEM_MC_ELSE() {
4175 IEM_MC_REL_JMP_S32(i32Imm);
4176 } IEM_MC_ENDIF();
4177 IEM_MC_END();
4178 }
4179 return VINF_SUCCESS;
4180}
4181
4182
4183/** Opcode 0x0f 0x86. */
4184FNIEMOP_DEF(iemOp_jbe_Jv)
4185{
4186 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4187 IEMOP_HLP_MIN_386();
4188 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4189 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4190 {
4191 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4193
4194 IEM_MC_BEGIN(0, 0);
4195 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4196 IEM_MC_REL_JMP_S16(i16Imm);
4197 } IEM_MC_ELSE() {
4198 IEM_MC_ADVANCE_RIP();
4199 } IEM_MC_ENDIF();
4200 IEM_MC_END();
4201 }
4202 else
4203 {
4204 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4206
4207 IEM_MC_BEGIN(0, 0);
4208 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4209 IEM_MC_REL_JMP_S32(i32Imm);
4210 } IEM_MC_ELSE() {
4211 IEM_MC_ADVANCE_RIP();
4212 } IEM_MC_ENDIF();
4213 IEM_MC_END();
4214 }
4215 return VINF_SUCCESS;
4216}
4217
4218
4219/** Opcode 0x0f 0x87. */
4220FNIEMOP_DEF(iemOp_jnbe_Jv)
4221{
4222 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4223 IEMOP_HLP_MIN_386();
4224 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4225 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4226 {
4227 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4229
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4232 IEM_MC_ADVANCE_RIP();
4233 } IEM_MC_ELSE() {
4234 IEM_MC_REL_JMP_S16(i16Imm);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_END();
4237 }
4238 else
4239 {
4240 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4242
4243 IEM_MC_BEGIN(0, 0);
4244 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4245 IEM_MC_ADVANCE_RIP();
4246 } IEM_MC_ELSE() {
4247 IEM_MC_REL_JMP_S32(i32Imm);
4248 } IEM_MC_ENDIF();
4249 IEM_MC_END();
4250 }
4251 return VINF_SUCCESS;
4252}
4253
4254
4255/** Opcode 0x0f 0x88. */
4256FNIEMOP_DEF(iemOp_js_Jv)
4257{
4258 IEMOP_MNEMONIC(js_Jv, "js Jv");
4259 IEMOP_HLP_MIN_386();
4260 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4261 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4262 {
4263 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4265
4266 IEM_MC_BEGIN(0, 0);
4267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4268 IEM_MC_REL_JMP_S16(i16Imm);
4269 } IEM_MC_ELSE() {
4270 IEM_MC_ADVANCE_RIP();
4271 } IEM_MC_ENDIF();
4272 IEM_MC_END();
4273 }
4274 else
4275 {
4276 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4278
4279 IEM_MC_BEGIN(0, 0);
4280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4281 IEM_MC_REL_JMP_S32(i32Imm);
4282 } IEM_MC_ELSE() {
4283 IEM_MC_ADVANCE_RIP();
4284 } IEM_MC_ENDIF();
4285 IEM_MC_END();
4286 }
4287 return VINF_SUCCESS;
4288}
4289
4290
4291/** Opcode 0x0f 0x89. */
4292FNIEMOP_DEF(iemOp_jns_Jv)
4293{
4294 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4295 IEMOP_HLP_MIN_386();
4296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4297 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4298 {
4299 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4301
4302 IEM_MC_BEGIN(0, 0);
4303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4304 IEM_MC_ADVANCE_RIP();
4305 } IEM_MC_ELSE() {
4306 IEM_MC_REL_JMP_S16(i16Imm);
4307 } IEM_MC_ENDIF();
4308 IEM_MC_END();
4309 }
4310 else
4311 {
4312 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314
4315 IEM_MC_BEGIN(0, 0);
4316 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4317 IEM_MC_ADVANCE_RIP();
4318 } IEM_MC_ELSE() {
4319 IEM_MC_REL_JMP_S32(i32Imm);
4320 } IEM_MC_ENDIF();
4321 IEM_MC_END();
4322 }
4323 return VINF_SUCCESS;
4324}
4325
4326
4327/** Opcode 0x0f 0x8a. */
4328FNIEMOP_DEF(iemOp_jp_Jv)
4329{
4330 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4331 IEMOP_HLP_MIN_386();
4332 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4333 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4334 {
4335 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4337
4338 IEM_MC_BEGIN(0, 0);
4339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4340 IEM_MC_REL_JMP_S16(i16Imm);
4341 } IEM_MC_ELSE() {
4342 IEM_MC_ADVANCE_RIP();
4343 } IEM_MC_ENDIF();
4344 IEM_MC_END();
4345 }
4346 else
4347 {
4348 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4350
4351 IEM_MC_BEGIN(0, 0);
4352 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4353 IEM_MC_REL_JMP_S32(i32Imm);
4354 } IEM_MC_ELSE() {
4355 IEM_MC_ADVANCE_RIP();
4356 } IEM_MC_ENDIF();
4357 IEM_MC_END();
4358 }
4359 return VINF_SUCCESS;
4360}
4361
4362
4363/** Opcode 0x0f 0x8b. */
4364FNIEMOP_DEF(iemOp_jnp_Jv)
4365{
4366 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4367 IEMOP_HLP_MIN_386();
4368 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4369 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4370 {
4371 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4373
4374 IEM_MC_BEGIN(0, 0);
4375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4376 IEM_MC_ADVANCE_RIP();
4377 } IEM_MC_ELSE() {
4378 IEM_MC_REL_JMP_S16(i16Imm);
4379 } IEM_MC_ENDIF();
4380 IEM_MC_END();
4381 }
4382 else
4383 {
4384 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4386
4387 IEM_MC_BEGIN(0, 0);
4388 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4389 IEM_MC_ADVANCE_RIP();
4390 } IEM_MC_ELSE() {
4391 IEM_MC_REL_JMP_S32(i32Imm);
4392 } IEM_MC_ENDIF();
4393 IEM_MC_END();
4394 }
4395 return VINF_SUCCESS;
4396}
4397
4398
4399/** Opcode 0x0f 0x8c. */
4400FNIEMOP_DEF(iemOp_jl_Jv)
4401{
4402 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4403 IEMOP_HLP_MIN_386();
4404 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4405 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4406 {
4407 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4409
4410 IEM_MC_BEGIN(0, 0);
4411 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4412 IEM_MC_REL_JMP_S16(i16Imm);
4413 } IEM_MC_ELSE() {
4414 IEM_MC_ADVANCE_RIP();
4415 } IEM_MC_ENDIF();
4416 IEM_MC_END();
4417 }
4418 else
4419 {
4420 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4422
4423 IEM_MC_BEGIN(0, 0);
4424 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4425 IEM_MC_REL_JMP_S32(i32Imm);
4426 } IEM_MC_ELSE() {
4427 IEM_MC_ADVANCE_RIP();
4428 } IEM_MC_ENDIF();
4429 IEM_MC_END();
4430 }
4431 return VINF_SUCCESS;
4432}
4433
4434
4435/** Opcode 0x0f 0x8d. */
4436FNIEMOP_DEF(iemOp_jnl_Jv)
4437{
4438 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4439 IEMOP_HLP_MIN_386();
4440 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4441 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4442 {
4443 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4445
4446 IEM_MC_BEGIN(0, 0);
4447 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4448 IEM_MC_ADVANCE_RIP();
4449 } IEM_MC_ELSE() {
4450 IEM_MC_REL_JMP_S16(i16Imm);
4451 } IEM_MC_ENDIF();
4452 IEM_MC_END();
4453 }
4454 else
4455 {
4456 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4458
4459 IEM_MC_BEGIN(0, 0);
4460 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4461 IEM_MC_ADVANCE_RIP();
4462 } IEM_MC_ELSE() {
4463 IEM_MC_REL_JMP_S32(i32Imm);
4464 } IEM_MC_ENDIF();
4465 IEM_MC_END();
4466 }
4467 return VINF_SUCCESS;
4468}
4469
4470
4471/** Opcode 0x0f 0x8e. */
4472FNIEMOP_DEF(iemOp_jle_Jv)
4473{
4474 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4475 IEMOP_HLP_MIN_386();
4476 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4477 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4478 {
4479 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4481
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4484 IEM_MC_REL_JMP_S16(i16Imm);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_ADVANCE_RIP();
4487 } IEM_MC_ENDIF();
4488 IEM_MC_END();
4489 }
4490 else
4491 {
4492 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494
4495 IEM_MC_BEGIN(0, 0);
4496 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4497 IEM_MC_REL_JMP_S32(i32Imm);
4498 } IEM_MC_ELSE() {
4499 IEM_MC_ADVANCE_RIP();
4500 } IEM_MC_ENDIF();
4501 IEM_MC_END();
4502 }
4503 return VINF_SUCCESS;
4504}
4505
4506
4507/** Opcode 0x0f 0x8f. */
4508FNIEMOP_DEF(iemOp_jnle_Jv)
4509{
4510 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4511 IEMOP_HLP_MIN_386();
4512 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4513 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4514 {
4515 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517
4518 IEM_MC_BEGIN(0, 0);
4519 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4520 IEM_MC_ADVANCE_RIP();
4521 } IEM_MC_ELSE() {
4522 IEM_MC_REL_JMP_S16(i16Imm);
4523 } IEM_MC_ENDIF();
4524 IEM_MC_END();
4525 }
4526 else
4527 {
4528 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530
4531 IEM_MC_BEGIN(0, 0);
4532 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4533 IEM_MC_ADVANCE_RIP();
4534 } IEM_MC_ELSE() {
4535 IEM_MC_REL_JMP_S32(i32Imm);
4536 } IEM_MC_ENDIF();
4537 IEM_MC_END();
4538 }
4539 return VINF_SUCCESS;
4540}
4541
4542
4543/** Opcode 0x0f 0x90. */
4544FNIEMOP_DEF(iemOp_seto_Eb)
4545{
4546 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4547 IEMOP_HLP_MIN_386();
4548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4549
4550 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4551 * any way. AMD says it's "unused", whatever that means. We're
4552 * ignoring for now. */
4553 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4554 {
4555 /* register target */
4556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4557 IEM_MC_BEGIN(0, 0);
4558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4559 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4560 } IEM_MC_ELSE() {
4561 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4562 } IEM_MC_ENDIF();
4563 IEM_MC_ADVANCE_RIP();
4564 IEM_MC_END();
4565 }
4566 else
4567 {
4568 /* memory target */
4569 IEM_MC_BEGIN(0, 1);
4570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4574 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4575 } IEM_MC_ELSE() {
4576 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4577 } IEM_MC_ENDIF();
4578 IEM_MC_ADVANCE_RIP();
4579 IEM_MC_END();
4580 }
4581 return VINF_SUCCESS;
4582}
4583
4584
4585/** Opcode 0x0f 0x91. */
4586FNIEMOP_DEF(iemOp_setno_Eb)
4587{
4588 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4589 IEMOP_HLP_MIN_386();
4590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4591
4592 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4593 * any way. AMD says it's "unused", whatever that means. We're
4594 * ignoring for now. */
4595 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4596 {
4597 /* register target */
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4599 IEM_MC_BEGIN(0, 0);
4600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4601 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4602 } IEM_MC_ELSE() {
4603 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4604 } IEM_MC_ENDIF();
4605 IEM_MC_ADVANCE_RIP();
4606 IEM_MC_END();
4607 }
4608 else
4609 {
4610 /* memory target */
4611 IEM_MC_BEGIN(0, 1);
4612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4617 } IEM_MC_ELSE() {
4618 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4619 } IEM_MC_ENDIF();
4620 IEM_MC_ADVANCE_RIP();
4621 IEM_MC_END();
4622 }
4623 return VINF_SUCCESS;
4624}
4625
4626
4627/** Opcode 0x0f 0x92. */
4628FNIEMOP_DEF(iemOp_setc_Eb)
4629{
4630 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4631 IEMOP_HLP_MIN_386();
4632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4633
4634 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4635 * any way. AMD says it's "unused", whatever that means. We're
4636 * ignoring for now. */
4637 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4638 {
4639 /* register target */
4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4641 IEM_MC_BEGIN(0, 0);
4642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4643 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4644 } IEM_MC_ELSE() {
4645 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4646 } IEM_MC_ENDIF();
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 }
4650 else
4651 {
4652 /* memory target */
4653 IEM_MC_BEGIN(0, 1);
4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4659 } IEM_MC_ELSE() {
4660 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4661 } IEM_MC_ENDIF();
4662 IEM_MC_ADVANCE_RIP();
4663 IEM_MC_END();
4664 }
4665 return VINF_SUCCESS;
4666}
4667
4668
4669/** Opcode 0x0f 0x93. */
4670FNIEMOP_DEF(iemOp_setnc_Eb)
4671{
4672 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4673 IEMOP_HLP_MIN_386();
4674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4675
4676 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4677 * any way. AMD says it's "unused", whatever that means. We're
4678 * ignoring for now. */
4679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4680 {
4681 /* register target */
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4683 IEM_MC_BEGIN(0, 0);
4684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4685 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4686 } IEM_MC_ELSE() {
4687 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4688 } IEM_MC_ENDIF();
4689 IEM_MC_ADVANCE_RIP();
4690 IEM_MC_END();
4691 }
4692 else
4693 {
4694 /* memory target */
4695 IEM_MC_BEGIN(0, 1);
4696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4701 } IEM_MC_ELSE() {
4702 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4703 } IEM_MC_ENDIF();
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 return VINF_SUCCESS;
4708}
4709
4710
4711/** Opcode 0x0f 0x94. */
4712FNIEMOP_DEF(iemOp_sete_Eb)
4713{
4714 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4715 IEMOP_HLP_MIN_386();
4716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4717
4718 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4719 * any way. AMD says it's "unused", whatever that means. We're
4720 * ignoring for now. */
4721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4722 {
4723 /* register target */
4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4725 IEM_MC_BEGIN(0, 0);
4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4727 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4728 } IEM_MC_ELSE() {
4729 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4730 } IEM_MC_ENDIF();
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 }
4734 else
4735 {
4736 /* memory target */
4737 IEM_MC_BEGIN(0, 1);
4738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4743 } IEM_MC_ELSE() {
4744 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4745 } IEM_MC_ENDIF();
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 }
4749 return VINF_SUCCESS;
4750}
4751
4752
4753/** Opcode 0x0f 0x95. */
4754FNIEMOP_DEF(iemOp_setne_Eb)
4755{
4756 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4757 IEMOP_HLP_MIN_386();
4758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4759
4760 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4761 * any way. AMD says it's "unused", whatever that means. We're
4762 * ignoring for now. */
4763 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4764 {
4765 /* register target */
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767 IEM_MC_BEGIN(0, 0);
4768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4769 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4770 } IEM_MC_ELSE() {
4771 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4772 } IEM_MC_ENDIF();
4773 IEM_MC_ADVANCE_RIP();
4774 IEM_MC_END();
4775 }
4776 else
4777 {
4778 /* memory target */
4779 IEM_MC_BEGIN(0, 1);
4780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4785 } IEM_MC_ELSE() {
4786 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4787 } IEM_MC_ENDIF();
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 }
4791 return VINF_SUCCESS;
4792}
4793
4794
4795/** Opcode 0x0f 0x96. */
4796FNIEMOP_DEF(iemOp_setbe_Eb)
4797{
4798 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4799 IEMOP_HLP_MIN_386();
4800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4801
4802 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4803 * any way. AMD says it's "unused", whatever that means. We're
4804 * ignoring for now. */
4805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4806 {
4807 /* register target */
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809 IEM_MC_BEGIN(0, 0);
4810 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4811 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4812 } IEM_MC_ELSE() {
4813 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4814 } IEM_MC_ENDIF();
4815 IEM_MC_ADVANCE_RIP();
4816 IEM_MC_END();
4817 }
4818 else
4819 {
4820 /* memory target */
4821 IEM_MC_BEGIN(0, 1);
4822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4825 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4827 } IEM_MC_ELSE() {
4828 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4829 } IEM_MC_ENDIF();
4830 IEM_MC_ADVANCE_RIP();
4831 IEM_MC_END();
4832 }
4833 return VINF_SUCCESS;
4834}
4835
4836
4837/** Opcode 0x0f 0x97. */
4838FNIEMOP_DEF(iemOp_setnbe_Eb)
4839{
4840 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4841 IEMOP_HLP_MIN_386();
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843
4844 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4845 * any way. AMD says it's "unused", whatever that means. We're
4846 * ignoring for now. */
4847 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4848 {
4849 /* register target */
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851 IEM_MC_BEGIN(0, 0);
4852 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4853 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4854 } IEM_MC_ELSE() {
4855 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4856 } IEM_MC_ENDIF();
4857 IEM_MC_ADVANCE_RIP();
4858 IEM_MC_END();
4859 }
4860 else
4861 {
4862 /* memory target */
4863 IEM_MC_BEGIN(0, 1);
4864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4868 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4869 } IEM_MC_ELSE() {
4870 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4871 } IEM_MC_ENDIF();
4872 IEM_MC_ADVANCE_RIP();
4873 IEM_MC_END();
4874 }
4875 return VINF_SUCCESS;
4876}
4877
4878
4879/** Opcode 0x0f 0x98. */
4880FNIEMOP_DEF(iemOp_sets_Eb)
4881{
4882 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4883 IEMOP_HLP_MIN_386();
4884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4885
4886 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4887 * any way. AMD says it's "unused", whatever that means. We're
4888 * ignoring for now. */
4889 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4890 {
4891 /* register target */
4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4893 IEM_MC_BEGIN(0, 0);
4894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4895 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4896 } IEM_MC_ELSE() {
4897 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4898 } IEM_MC_ENDIF();
4899 IEM_MC_ADVANCE_RIP();
4900 IEM_MC_END();
4901 }
4902 else
4903 {
4904 /* memory target */
4905 IEM_MC_BEGIN(0, 1);
4906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4910 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4911 } IEM_MC_ELSE() {
4912 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4913 } IEM_MC_ENDIF();
4914 IEM_MC_ADVANCE_RIP();
4915 IEM_MC_END();
4916 }
4917 return VINF_SUCCESS;
4918}
4919
4920
4921/** Opcode 0x0f 0x99. */
4922FNIEMOP_DEF(iemOp_setns_Eb)
4923{
4924 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4925 IEMOP_HLP_MIN_386();
4926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4927
4928 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4929 * any way. AMD says it's "unused", whatever that means. We're
4930 * ignoring for now. */
4931 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4932 {
4933 /* register target */
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935 IEM_MC_BEGIN(0, 0);
4936 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4937 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4938 } IEM_MC_ELSE() {
4939 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4940 } IEM_MC_ENDIF();
4941 IEM_MC_ADVANCE_RIP();
4942 IEM_MC_END();
4943 }
4944 else
4945 {
4946 /* memory target */
4947 IEM_MC_BEGIN(0, 1);
4948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4952 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4953 } IEM_MC_ELSE() {
4954 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4955 } IEM_MC_ENDIF();
4956 IEM_MC_ADVANCE_RIP();
4957 IEM_MC_END();
4958 }
4959 return VINF_SUCCESS;
4960}
4961
4962
4963/** Opcode 0x0f 0x9a. */
4964FNIEMOP_DEF(iemOp_setp_Eb)
4965{
4966 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4967 IEMOP_HLP_MIN_386();
4968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4969
4970 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4971 * any way. AMD says it's "unused", whatever that means. We're
4972 * ignoring for now. */
4973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4974 {
4975 /* register target */
4976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4977 IEM_MC_BEGIN(0, 0);
4978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4979 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4980 } IEM_MC_ELSE() {
4981 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4982 } IEM_MC_ENDIF();
4983 IEM_MC_ADVANCE_RIP();
4984 IEM_MC_END();
4985 }
4986 else
4987 {
4988 /* memory target */
4989 IEM_MC_BEGIN(0, 1);
4990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4994 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4995 } IEM_MC_ELSE() {
4996 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4997 } IEM_MC_ENDIF();
4998 IEM_MC_ADVANCE_RIP();
4999 IEM_MC_END();
5000 }
5001 return VINF_SUCCESS;
5002}
5003
5004
5005/** Opcode 0x0f 0x9b. */
5006FNIEMOP_DEF(iemOp_setnp_Eb)
5007{
5008 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5009 IEMOP_HLP_MIN_386();
5010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5011
5012 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5013 * any way. AMD says it's "unused", whatever that means. We're
5014 * ignoring for now. */
5015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5016 {
5017 /* register target */
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_BEGIN(0, 0);
5020 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5021 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5024 } IEM_MC_ENDIF();
5025 IEM_MC_ADVANCE_RIP();
5026 IEM_MC_END();
5027 }
5028 else
5029 {
5030 /* memory target */
5031 IEM_MC_BEGIN(0, 1);
5032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5036 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5037 } IEM_MC_ELSE() {
5038 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5039 } IEM_MC_ENDIF();
5040 IEM_MC_ADVANCE_RIP();
5041 IEM_MC_END();
5042 }
5043 return VINF_SUCCESS;
5044}
5045
5046
5047/** Opcode 0x0f 0x9c. */
5048FNIEMOP_DEF(iemOp_setl_Eb)
5049{
5050 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5051 IEMOP_HLP_MIN_386();
5052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5053
5054 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5055 * any way. AMD says it's "unused", whatever that means. We're
5056 * ignoring for now. */
5057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5058 {
5059 /* register target */
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5061 IEM_MC_BEGIN(0, 0);
5062 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5063 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5064 } IEM_MC_ELSE() {
5065 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5066 } IEM_MC_ENDIF();
5067 IEM_MC_ADVANCE_RIP();
5068 IEM_MC_END();
5069 }
5070 else
5071 {
5072 /* memory target */
5073 IEM_MC_BEGIN(0, 1);
5074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5078 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5079 } IEM_MC_ELSE() {
5080 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5081 } IEM_MC_ENDIF();
5082 IEM_MC_ADVANCE_RIP();
5083 IEM_MC_END();
5084 }
5085 return VINF_SUCCESS;
5086}
5087
5088
5089/** Opcode 0x0f 0x9d. */
5090FNIEMOP_DEF(iemOp_setnl_Eb)
5091{
5092 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5093 IEMOP_HLP_MIN_386();
5094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5095
5096 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5097 * any way. AMD says it's "unused", whatever that means. We're
5098 * ignoring for now. */
5099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5100 {
5101 /* register target */
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103 IEM_MC_BEGIN(0, 0);
5104 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5105 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5106 } IEM_MC_ELSE() {
5107 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5108 } IEM_MC_ENDIF();
5109 IEM_MC_ADVANCE_RIP();
5110 IEM_MC_END();
5111 }
5112 else
5113 {
5114 /* memory target */
5115 IEM_MC_BEGIN(0, 1);
5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5120 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5121 } IEM_MC_ELSE() {
5122 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5123 } IEM_MC_ENDIF();
5124 IEM_MC_ADVANCE_RIP();
5125 IEM_MC_END();
5126 }
5127 return VINF_SUCCESS;
5128}
5129
5130
5131/** Opcode 0x0f 0x9e. */
5132FNIEMOP_DEF(iemOp_setle_Eb)
5133{
5134 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5135 IEMOP_HLP_MIN_386();
5136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5137
5138 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5139 * any way. AMD says it's "unused", whatever that means. We're
5140 * ignoring for now. */
5141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5142 {
5143 /* register target */
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5145 IEM_MC_BEGIN(0, 0);
5146 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5147 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5148 } IEM_MC_ELSE() {
5149 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5150 } IEM_MC_ENDIF();
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 }
5154 else
5155 {
5156 /* memory target */
5157 IEM_MC_BEGIN(0, 1);
5158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5162 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5163 } IEM_MC_ELSE() {
5164 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5165 } IEM_MC_ENDIF();
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 }
5169 return VINF_SUCCESS;
5170}
5171
5172
5173/** Opcode 0x0f 0x9f. */
5174FNIEMOP_DEF(iemOp_setnle_Eb)
5175{
5176 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5177 IEMOP_HLP_MIN_386();
5178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5179
5180 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5181 * any way. AMD says it's "unused", whatever that means. We're
5182 * ignoring for now. */
5183 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5184 {
5185 /* register target */
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187 IEM_MC_BEGIN(0, 0);
5188 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5189 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5190 } IEM_MC_ELSE() {
5191 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5192 } IEM_MC_ENDIF();
5193 IEM_MC_ADVANCE_RIP();
5194 IEM_MC_END();
5195 }
5196 else
5197 {
5198 /* memory target */
5199 IEM_MC_BEGIN(0, 1);
5200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5204 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5205 } IEM_MC_ELSE() {
5206 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5207 } IEM_MC_ENDIF();
5208 IEM_MC_ADVANCE_RIP();
5209 IEM_MC_END();
5210 }
5211 return VINF_SUCCESS;
5212}
5213
5214
5215/**
5216 * Common 'push segment-register' helper.
5217 */
5218FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5219{
5220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
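/* In 64-bit mode only the 0x0f 0xa0/0xa8 (fs/gs) pushes are valid; the
   one-byte es/cs/ss/ds push opcodes raise #UD there, so only FS or GS should
   ever reach this helper in that mode - which is what the assertion checks. */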
5221 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5222 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5223
5224 switch (pVCpu->iem.s.enmEffOpSize)
5225 {
5226 case IEMMODE_16BIT:
5227 IEM_MC_BEGIN(0, 1);
5228 IEM_MC_LOCAL(uint16_t, u16Value);
5229 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5230 IEM_MC_PUSH_U16(u16Value);
5231 IEM_MC_ADVANCE_RIP();
5232 IEM_MC_END();
5233 break;
5234
5235 case IEMMODE_32BIT:
5236 IEM_MC_BEGIN(0, 1);
5237 IEM_MC_LOCAL(uint32_t, u32Value);
5238 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
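/* Note: the dedicated _SREG push is used here rather than a plain 32-bit
   push, presumably because real CPUs differ on 'push sreg' with a 32-bit
   operand: several only write the selector word and leave the high half of
   the stack slot untouched. */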
5239 IEM_MC_PUSH_U32_SREG(u32Value);
5240 IEM_MC_ADVANCE_RIP();
5241 IEM_MC_END();
5242 break;
5243
5244 case IEMMODE_64BIT:
5245 IEM_MC_BEGIN(0, 1);
5246 IEM_MC_LOCAL(uint64_t, u64Value);
5247 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5248 IEM_MC_PUSH_U64(u64Value);
5249 IEM_MC_ADVANCE_RIP();
5250 IEM_MC_END();
5251 break;
5252 }
5253
5254 return VINF_SUCCESS;
5255}
5256
5257
5258/** Opcode 0x0f 0xa0. */
5259FNIEMOP_DEF(iemOp_push_fs)
5260{
5261 IEMOP_MNEMONIC(push_fs, "push fs");
5262 IEMOP_HLP_MIN_386();
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5264 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5265}
5266
5267
5268/** Opcode 0x0f 0xa1. */
5269FNIEMOP_DEF(iemOp_pop_fs)
5270{
5271 IEMOP_MNEMONIC(pop_fs, "pop fs");
5272 IEMOP_HLP_MIN_386();
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5275}
5276
5277
5278/** Opcode 0x0f 0xa2. */
5279FNIEMOP_DEF(iemOp_cpuid)
5280{
5281 IEMOP_MNEMONIC(cpuid, "cpuid");
5282 IEMOP_HLP_MIN_486(); /* not all 486es. */
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5285}
5286
5287
5288/**
5289 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5290 * iemOp_bts_Ev_Gv.
5291 */
5292FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5293{
5294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5295 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
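/* The bt/bts/btr/btc family only defines CF (the selected bit); the flags
   listed above are left as don't-care for the verifier. */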
5296
5297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5298 {
5299 /* register destination. */
5300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5301 switch (pVCpu->iem.s.enmEffOpSize)
5302 {
5303 case IEMMODE_16BIT:
5304 IEM_MC_BEGIN(3, 0);
5305 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5306 IEM_MC_ARG(uint16_t, u16Src, 1);
5307 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5308
5309 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
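/* For a register destination the bit offset wraps modulo the operand size
   (BitOffset MOD 16 here), hence the masking below; same for the 0x1f and
   0x3f masks in the wider cases. */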
5310 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5311 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5312 IEM_MC_REF_EFLAGS(pEFlags);
5313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5314
5315 IEM_MC_ADVANCE_RIP();
5316 IEM_MC_END();
5317 return VINF_SUCCESS;
5318
5319 case IEMMODE_32BIT:
5320 IEM_MC_BEGIN(3, 0);
5321 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5322 IEM_MC_ARG(uint32_t, u32Src, 1);
5323 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5324
5325 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5326 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5327 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5328 IEM_MC_REF_EFLAGS(pEFlags);
5329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5330
5331 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5332 IEM_MC_ADVANCE_RIP();
5333 IEM_MC_END();
5334 return VINF_SUCCESS;
5335
5336 case IEMMODE_64BIT:
5337 IEM_MC_BEGIN(3, 0);
5338 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5339 IEM_MC_ARG(uint64_t, u64Src, 1);
5340 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5341
5342 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5343 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5344 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5345 IEM_MC_REF_EFLAGS(pEFlags);
5346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5347
5348 IEM_MC_ADVANCE_RIP();
5349 IEM_MC_END();
5350 return VINF_SUCCESS;
5351
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354 }
5355 else
5356 {
5357 /* memory destination. */
5358
5359 uint32_t fAccess;
5360 if (pImpl->pfnLockedU16)
5361 fAccess = IEM_ACCESS_DATA_RW;
5362 else /* BT */
5363 fAccess = IEM_ACCESS_DATA_R;
5364
5365 /** @todo test negative bit offsets! */
5366 switch (pVCpu->iem.s.enmEffOpSize)
5367 {
5368 case IEMMODE_16BIT:
5369 IEM_MC_BEGIN(3, 2);
5370 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5371 IEM_MC_ARG(uint16_t, u16Src, 1);
5372 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5374 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5375
5376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5377 if (pImpl->pfnLockedU16)
5378 IEMOP_HLP_DONE_DECODING();
5379 else
5380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5381 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5382 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5383 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5384 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5385 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5386 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5387 IEM_MC_FETCH_EFLAGS(EFlags);
5388
5389 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5390 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5391 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5392 else
5393 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5395
5396 IEM_MC_COMMIT_EFLAGS(EFlags);
5397 IEM_MC_ADVANCE_RIP();
5398 IEM_MC_END();
5399 return VINF_SUCCESS;
5400
5401 case IEMMODE_32BIT:
5402 IEM_MC_BEGIN(3, 2);
5403 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5404 IEM_MC_ARG(uint32_t, u32Src, 1);
5405 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5407 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5408
5409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5410 if (pImpl->pfnLockedU16)
5411 IEMOP_HLP_DONE_DECODING();
5412 else
5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5414 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5415 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5416 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5417 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5418 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5419 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5420 IEM_MC_FETCH_EFLAGS(EFlags);
5421
5422 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5423 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5424 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5425 else
5426 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5427 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5428
5429 IEM_MC_COMMIT_EFLAGS(EFlags);
5430 IEM_MC_ADVANCE_RIP();
5431 IEM_MC_END();
5432 return VINF_SUCCESS;
5433
5434 case IEMMODE_64BIT:
5435 IEM_MC_BEGIN(3, 2);
5436 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5437 IEM_MC_ARG(uint64_t, u64Src, 1);
5438 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5440 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5441
5442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5443 if (pImpl->pfnLockedU16)
5444 IEMOP_HLP_DONE_DECODING();
5445 else
5446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5447 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5448 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5449 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5450 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5451 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5452 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5453 IEM_MC_FETCH_EFLAGS(EFlags);
5454
5455 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5456 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5457 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5458 else
5459 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5460 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5461
5462 IEM_MC_COMMIT_EFLAGS(EFlags);
5463 IEM_MC_ADVANCE_RIP();
5464 IEM_MC_END();
5465 return VINF_SUCCESS;
5466
5467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5468 }
5469 }
5470}
5471
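/*
 * Illustration only (hypothetical helper, not called by the worker above):
 * for the memory forms the register operand is a signed bit offset, which is
 * why the worker SARs it down to an operand-sized unit count and SHLs that
 * back up into a byte displacement before masking the bit index.  The 32-bit
 * case boils down to:
 *
 * @code
 *  static bool iemSketchBitTest32(uint32_t const *puBase, int32_t iBitOfs)
 *  {
 *      puBase += iBitOfs >> 5;                     // signed dword adjustment, may be negative
 *      return (*puBase >> (iBitOfs & 0x1f)) & 1;   // the selected bit ends up in CF
 *  }
 * @endcode
 */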
5472
5473/** Opcode 0x0f 0xa3. */
5474FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5475{
5476 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5477 IEMOP_HLP_MIN_386();
5478 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5479}
5480
5481
5482/**
5483 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5484 */
5485FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5486{
5487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5488 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5489
5490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5491 {
5492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5494
5495 switch (pVCpu->iem.s.enmEffOpSize)
5496 {
5497 case IEMMODE_16BIT:
5498 IEM_MC_BEGIN(4, 0);
5499 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5500 IEM_MC_ARG(uint16_t, u16Src, 1);
5501 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5502 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5503
5504 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5505 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5506 IEM_MC_REF_EFLAGS(pEFlags);
5507 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5508
5509 IEM_MC_ADVANCE_RIP();
5510 IEM_MC_END();
5511 return VINF_SUCCESS;
5512
5513 case IEMMODE_32BIT:
5514 IEM_MC_BEGIN(4, 0);
5515 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5516 IEM_MC_ARG(uint32_t, u32Src, 1);
5517 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5518 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5519
5520 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5521 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5522 IEM_MC_REF_EFLAGS(pEFlags);
5523 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5524
5525 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5526 IEM_MC_ADVANCE_RIP();
5527 IEM_MC_END();
5528 return VINF_SUCCESS;
5529
5530 case IEMMODE_64BIT:
5531 IEM_MC_BEGIN(4, 0);
5532 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5533 IEM_MC_ARG(uint64_t, u64Src, 1);
5534 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5535 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5536
5537 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5538 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5539 IEM_MC_REF_EFLAGS(pEFlags);
5540 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5541
5542 IEM_MC_ADVANCE_RIP();
5543 IEM_MC_END();
5544 return VINF_SUCCESS;
5545
5546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5547 }
5548 }
5549 else
5550 {
5551 switch (pVCpu->iem.s.enmEffOpSize)
5552 {
5553 case IEMMODE_16BIT:
5554 IEM_MC_BEGIN(4, 2);
5555 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5556 IEM_MC_ARG(uint16_t, u16Src, 1);
5557 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5560
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5562 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5563 IEM_MC_ASSIGN(cShiftArg, cShift);
5564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5565 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5566 IEM_MC_FETCH_EFLAGS(EFlags);
5567 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5568 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5569
5570 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5571 IEM_MC_COMMIT_EFLAGS(EFlags);
5572 IEM_MC_ADVANCE_RIP();
5573 IEM_MC_END();
5574 return VINF_SUCCESS;
5575
5576 case IEMMODE_32BIT:
5577 IEM_MC_BEGIN(4, 2);
5578 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5579 IEM_MC_ARG(uint32_t, u32Src, 1);
5580 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5581 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5583
5584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5585 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5586 IEM_MC_ASSIGN(cShiftArg, cShift);
5587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5588 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5589 IEM_MC_FETCH_EFLAGS(EFlags);
5590 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5591 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5592
5593 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5594 IEM_MC_COMMIT_EFLAGS(EFlags);
5595 IEM_MC_ADVANCE_RIP();
5596 IEM_MC_END();
5597 return VINF_SUCCESS;
5598
5599 case IEMMODE_64BIT:
5600 IEM_MC_BEGIN(4, 2);
5601 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5602 IEM_MC_ARG(uint64_t, u64Src, 1);
5603 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5604 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5606
5607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5608 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5609 IEM_MC_ASSIGN(cShiftArg, cShift);
5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5611 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_FETCH_EFLAGS(EFlags);
5613 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5614 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5615
5616 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5617 IEM_MC_COMMIT_EFLAGS(EFlags);
5618 IEM_MC_ADVANCE_RIP();
5619 IEM_MC_END();
5620 return VINF_SUCCESS;
5621
5622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5623 }
5624 }
5625}
5626
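/*
 * Illustration only (hypothetical helper): with the count already masked to
 * the operand width, SHLD shifts the destination left and fills the vacated
 * low bits from the top of the source.  The 32-bit case, assuming cShift has
 * been reduced to 0..31:
 *
 * @code
 *  static uint32_t iemSketchShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *  {
 *      if (!cShift)
 *          return uDst;                            // count 0: operand and flags unchanged
 *      return (uDst << cShift) | (uSrc >> (32 - cShift));
 *  }
 * @endcode
 */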
5627
5628/**
5629 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5630 */
5631FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5632{
5633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5634 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5635
5636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5637 {
5638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5639
5640 switch (pVCpu->iem.s.enmEffOpSize)
5641 {
5642 case IEMMODE_16BIT:
5643 IEM_MC_BEGIN(4, 0);
5644 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5645 IEM_MC_ARG(uint16_t, u16Src, 1);
5646 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5647 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5648
5649 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5650 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5651 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5652 IEM_MC_REF_EFLAGS(pEFlags);
5653 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5654
5655 IEM_MC_ADVANCE_RIP();
5656 IEM_MC_END();
5657 return VINF_SUCCESS;
5658
5659 case IEMMODE_32BIT:
5660 IEM_MC_BEGIN(4, 0);
5661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5662 IEM_MC_ARG(uint32_t, u32Src, 1);
5663 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5664 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5665
5666 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5667 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5668 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5669 IEM_MC_REF_EFLAGS(pEFlags);
5670 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5671
5672 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5673 IEM_MC_ADVANCE_RIP();
5674 IEM_MC_END();
5675 return VINF_SUCCESS;
5676
5677 case IEMMODE_64BIT:
5678 IEM_MC_BEGIN(4, 0);
5679 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5680 IEM_MC_ARG(uint64_t, u64Src, 1);
5681 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5682 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5683
5684 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5685 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5686 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5687 IEM_MC_REF_EFLAGS(pEFlags);
5688 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5689
5690 IEM_MC_ADVANCE_RIP();
5691 IEM_MC_END();
5692 return VINF_SUCCESS;
5693
5694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5695 }
5696 }
5697 else
5698 {
5699 switch (pVCpu->iem.s.enmEffOpSize)
5700 {
5701 case IEMMODE_16BIT:
5702 IEM_MC_BEGIN(4, 2);
5703 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5704 IEM_MC_ARG(uint16_t, u16Src, 1);
5705 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5706 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5708
5709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5711 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5712 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5713 IEM_MC_FETCH_EFLAGS(EFlags);
5714 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5715 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5716
5717 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5718 IEM_MC_COMMIT_EFLAGS(EFlags);
5719 IEM_MC_ADVANCE_RIP();
5720 IEM_MC_END();
5721 return VINF_SUCCESS;
5722
5723 case IEMMODE_32BIT:
5724 IEM_MC_BEGIN(4, 2);
5725 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5726 IEM_MC_ARG(uint32_t, u32Src, 1);
5727 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5728 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5730
5731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5733 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5734 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5735 IEM_MC_FETCH_EFLAGS(EFlags);
5736 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5737 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5738
5739 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5740 IEM_MC_COMMIT_EFLAGS(EFlags);
5741 IEM_MC_ADVANCE_RIP();
5742 IEM_MC_END();
5743 return VINF_SUCCESS;
5744
5745 case IEMMODE_64BIT:
5746 IEM_MC_BEGIN(4, 2);
5747 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5748 IEM_MC_ARG(uint64_t, u64Src, 1);
5749 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5750 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5752
5753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5755 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5756 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5757 IEM_MC_FETCH_EFLAGS(EFlags);
5758 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5760
5761 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5762 IEM_MC_COMMIT_EFLAGS(EFlags);
5763 IEM_MC_ADVANCE_RIP();
5764 IEM_MC_END();
5765 return VINF_SUCCESS;
5766
5767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5768 }
5769 }
5770}
5771
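/*
 * Illustration only (hypothetical helper): SHRD is the mirror image, shifting
 * the destination right and filling the vacated high bits from the bottom of
 * the source.  Again the 32-bit case with a pre-masked count:
 *
 * @code
 *  static uint32_t iemSketchShrd32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *  {
 *      if (!cShift)
 *          return uDst;                            // count 0: operand and flags unchanged
 *      return (uDst >> cShift) | (uSrc << (32 - cShift));
 *  }
 * @endcode
 */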
5772
5773
5774/** Opcode 0x0f 0xa4. */
5775FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5776{
5777 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5778 IEMOP_HLP_MIN_386();
5779 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5780}
5781
5782
5783/** Opcode 0x0f 0xa5. */
5784FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5785{
5786 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5787 IEMOP_HLP_MIN_386();
5788 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5789}
5790
5791
5792/** Opcode 0x0f 0xa8. */
5793FNIEMOP_DEF(iemOp_push_gs)
5794{
5795 IEMOP_MNEMONIC(push_gs, "push gs");
5796 IEMOP_HLP_MIN_386();
5797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5798 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5799}
5800
5801
5802/** Opcode 0x0f 0xa9. */
5803FNIEMOP_DEF(iemOp_pop_gs)
5804{
5805 IEMOP_MNEMONIC(pop_gs, "pop gs");
5806 IEMOP_HLP_MIN_386();
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5809}
5810
5811
5812/** Opcode 0x0f 0xaa. */
5813FNIEMOP_STUB(iemOp_rsm);
5814//IEMOP_HLP_MIN_386();
5815
5816
5817/** Opcode 0x0f 0xab. */
5818FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5819{
5820 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5821 IEMOP_HLP_MIN_386();
5822 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5823}
5824
5825
5826/** Opcode 0x0f 0xac. */
5827FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5828{
5829 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5830 IEMOP_HLP_MIN_386();
5831 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5832}
5833
5834
5835/** Opcode 0x0f 0xad. */
5836FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5837{
5838 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5839 IEMOP_HLP_MIN_386();
5840 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5841}
5842
5843
5844/** Opcode 0x0f 0xae mem/0. */
5845FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5846{
5847 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5848 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5849 return IEMOP_RAISE_INVALID_OPCODE();
5850
5851 IEM_MC_BEGIN(3, 1);
5852 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5853 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5854 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5858 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5859 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5860 IEM_MC_END();
5861 return VINF_SUCCESS;
5862}
5863
5864
5865/** Opcode 0x0f 0xae mem/1. */
5866FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5867{
5868 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5869 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5870 return IEMOP_RAISE_INVALID_OPCODE();
5871
5872 IEM_MC_BEGIN(3, 1);
5873 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5874 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5875 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5879 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5880 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5881 IEM_MC_END();
5882 return VINF_SUCCESS;
5883}
5884
5885
5886/**
5887 * @opmaps grp15
5888 * @opcode !11/2
5889 * @oppfx none
5890 * @opcpuid sse
5891 * @opgroup og_sse_mxcsrsm
5892 * @opxcpttype 5
5893 * @optest op1=0 -> mxcsr=0
5894 * @optest op1=0x2083 -> mxcsr=0x2083
5895 * @optest op1=0xfffffffe -> value.xcpt=0xd
5896 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5897 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5898 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5899 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5900 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5901 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5902 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5903 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5904 */
5905FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5906{
5907 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5908 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5909 return IEMOP_RAISE_INVALID_OPCODE();
5910
5911 IEM_MC_BEGIN(2, 0);
5912 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5913 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5916 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5917 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5918 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5919 IEM_MC_END();
5920 return VINF_SUCCESS;
5921}
5922
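/*
 * Illustration only (an assumption about what iemCImpl_ldmxcsr does, with a
 * hypothetical mask value): the op1=0xfffffffe -> \#GP @optest above follows
 * from rejecting reserved MXCSR bits after the TS/EM/OSFXSR checks:
 *
 * @code
 *  uint32_t const fMxCsrMask = 0xffff;                 // supported-bits mask, illustrative only
 *  if (uNewMxCsr & ~fMxCsrMask)
 *      return iemRaiseGeneralProtectionFault0(pVCpu);  // reserved bit set -> #GP(0)
 * @endcode
 */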
5923
5924/**
5925 * @opmaps grp15
5926 * @opcode !11/3
5927 * @oppfx none
5928 * @opcpuid sse
5929 * @opgroup og_sse_mxcsrsm
5930 * @opxcpttype 5
5931 * @optest mxcsr=0 -> op1=0
5932 * @optest mxcsr=0x2083 -> op1=0x2083
5933 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5934 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5935 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5936 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5937 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5938 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5939 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5940 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5941 */
5942FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5943{
5944 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5945 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5946 return IEMOP_RAISE_INVALID_OPCODE();
5947
5948 IEM_MC_BEGIN(2, 0);
5949 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5950 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5954 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5955 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5956 IEM_MC_END();
5957 return VINF_SUCCESS;
5958}
5959
5960
5961/**
5962 * @opmaps vexgrp15
5963 * @opcode !11/3
5964 * @oppfx none
5965 * @opcpuid avx
5966 * @opgroup og_avx_mxcsrsm
5967 * @opxcpttype 5
5968 * @optest mxcsr=0 -> op1=0
5969 * @optest mxcsr=0x2083 -> op1=0x2083
5970 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5971 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5972 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5973 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5974 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5975 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5976 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5977 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5978 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5979 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5980 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5981 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5982 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5983 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5984 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5985 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5986 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5987 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5988 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5989 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5990 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5991 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5992 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5993 * -> value.xcpt=0x6
5994 * @remarks AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
5995 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5996 * APMv4 rev 3.17 page 509.
5997 * @todo Test this instruction on AMD Ryzen.
5998 */
5999FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
6000{
6001 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6002 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6003 return IEMOP_RAISE_INVALID_OPCODE();
6004
6005 IEM_MC_BEGIN(2, 0);
6006 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6007 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6009 IEMOP_HLP_DONE_VEX_DECODING_L_ZERO_NO_VVV();
6010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6011 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6012 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
6013 IEM_MC_END();
6014 return VINF_SUCCESS;
6015}
6016
6017
6018/**
6019 * @opmaps grp15
6020 * @opcode !11/4
6021 * @oppfx none
6022 * @opcpuid xsave
6023 * @opgroup og_system
6024 * @opxcpttype none
6025 */
6026FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6027{
6028 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6029 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6030 return IEMOP_RAISE_INVALID_OPCODE();
6031
6032 IEM_MC_BEGIN(3, 0);
6033 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6034 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6035 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6039 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6040 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6041 IEM_MC_END();
6042 return VINF_SUCCESS;
6043}
6044
6045
6046/**
6047 * @opmaps grp15
6048 * @opcode !11/5
6049 * @oppfx none
6050 * @opcpuid xsave
6051 * @opgroup og_system
6052 * @opxcpttype none
6053 */
6054FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6055{
6056 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6057 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6058 return IEMOP_RAISE_INVALID_OPCODE();
6059
6060 IEM_MC_BEGIN(3, 0);
6061 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6062 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6063 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, cf. fxrstor above. */
6067 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6068 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6069 IEM_MC_END();
6070 return VINF_SUCCESS;
6071}
6072
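/*
 * Illustration only: the XSAVE area operated on by xsave/xrstor starts with
 * the 512 byte FXSAVE image, followed at offset 512 by the XSAVE header whose
 * first quadword (XSTATE_BV) tells XRSTOR which components to restore.  A
 * rough sketch of the layout (illustrative struct, not one used by IEM):
 *
 * @code
 *  typedef struct XSAVEAREASKETCH
 *  {
 *      uint8_t  abFxSave[512];     // legacy x87/SSE state in FXSAVE format
 *      uint64_t uXStateBv;         // XSTATE_BV: component bitmap
 *      uint64_t uXCompBv;          // XCOMP_BV: compacted-format indicator
 *      uint64_t au64Rsvd[6];       // rest of the 64 byte header, must be zero
 *      // AVX and later components follow at CPUID(0xd)-reported offsets.
 *  } XSAVEAREASKETCH;
 * @endcode
 */
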
6073/** Opcode 0x0f 0xae mem/6. */
6074FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6075
6076/**
6077 * @opmaps grp15
6078 * @opcode !11/7
6079 * @oppfx none
6080 * @opcpuid clfsh
6081 * @opgroup og_cachectl
6082 * @optest op1=1 ->
6083 */
6084FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6085{
6086 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6087 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6088 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6089
6090 IEM_MC_BEGIN(2, 0);
6091 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6092 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6095 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6096 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6097 IEM_MC_END();
6098 return VINF_SUCCESS;
6099}
6100
6101/**
6102 * @opmaps grp15
6103 * @opcode !11/7
6104 * @oppfx 0x66
6105 * @opcpuid clflushopt
6106 * @opgroup og_cachectl
6107 * @optest op1=1 ->
6108 */
6109FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6110{
6111 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6112 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6113 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6114
6115 IEM_MC_BEGIN(2, 0);
6116 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6117 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6120 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6121 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6122 IEM_MC_END();
6123 return VINF_SUCCESS;
6124}
6125
6126
6127/** Opcode 0x0f 0xae 11b/5. */
6128FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6129{
6130 RT_NOREF_PV(bRm);
6131 IEMOP_MNEMONIC(lfence, "lfence");
6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6133 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6134 return IEMOP_RAISE_INVALID_OPCODE();
6135
6136 IEM_MC_BEGIN(0, 0);
6137 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6138 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6139 else
6140 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6141 IEM_MC_ADVANCE_RIP();
6142 IEM_MC_END();
6143 return VINF_SUCCESS;
6144}
6145
6146
6147/** Opcode 0x0f 0xae 11b/6. */
6148FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6149{
6150 RT_NOREF_PV(bRm);
6151 IEMOP_MNEMONIC(mfence, "mfence");
6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6153 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6154 return IEMOP_RAISE_INVALID_OPCODE();
6155
6156 IEM_MC_BEGIN(0, 0);
6157 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6158 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6159 else
6160 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6161 IEM_MC_ADVANCE_RIP();
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164}
6165
6166
6167/** Opcode 0x0f 0xae 11b/7. */
6168FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6169{
6170 RT_NOREF_PV(bRm);
6171 IEMOP_MNEMONIC(sfence, "sfence");
6172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6173 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6174 return IEMOP_RAISE_INVALID_OPCODE();
6175
6176 IEM_MC_BEGIN(0, 0);
6177 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6178 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6179 else
6180 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6181 IEM_MC_ADVANCE_RIP();
6182 IEM_MC_END();
6183 return VINF_SUCCESS;
6184}
6185
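/*
 * Illustration only (an assumption about iemAImpl_alt_mem_fence, not its
 * actual body): on hosts without the SSE2 fence instructions, a locked
 * read-modify-write on a dummy variable is the classic full-fence substitute:
 *
 * @code
 *  static void iemSketchAltMemFence(void)
 *  {
 *      int32_t volatile i32Dummy = 0;
 *      __atomic_fetch_add(&i32Dummy, 0, __ATOMIC_SEQ_CST); // lock add -> full barrier
 *  }
 * @endcode
 */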
6186
6187/** Opcode 0xf3 0x0f 0xae 11b/0. */
6188FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6189
6190/** Opcode 0xf3 0x0f 0xae 11b/1. */
6191FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6192
6193/** Opcode 0xf3 0x0f 0xae 11b/2. */
6194FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6195
6196/** Opcode 0xf3 0x0f 0xae 11b/3. */
6197FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6198
6199
6200/**
6201 * Group 15 jump table for register variant.
6202 */
6203IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6204{ /* pfx: none, 066h, 0f3h, 0f2h */
6205 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6206 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6207 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6208 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6209 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6210 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6211 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6212 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6213};
6214AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6215
6216
6217/**
6218 * Group 15 jump table for memory variant.
6219 */
6220IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6221{ /* pfx: none, 066h, 0f3h, 0f2h */
6222 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6223 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6224 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6225 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6226 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6227 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6228 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6229 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6230};
6231AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6232
6233
6234/** Opcode 0x0f 0xae. */
6235FNIEMOP_DEF(iemOp_Grp15)
6236{
6237 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
6238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6240 /* register, register */
6241 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6242 + pVCpu->iem.s.idxPrefix], bRm);
6243 /* memory, register */
6244 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6245 + pVCpu->iem.s.idxPrefix], bRm);
6246}
6247
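/*
 * Illustration only: both group 15 tables above are row major, eight ModR/M
 * reg values by four mandatory-prefix columns (none, 066h, 0f3h, 0f2h), which
 * is where the reg*4 + idxPrefix index in the dispatcher comes from:
 *
 * @code
 *  unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // /0../7 row
 *  unsigned const idx  = iReg * 4 + pVCpu->iem.s.idxPrefix;                  // prefix column
 * @endcode
 */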
6248
6249/**
6250 * Vex group 15 jump table for register variant.
6251 * @todo work in progress
6252 */
6253IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15RegReg[] =
6254{ /* pfx: none, 066h, 0f3h, 0f2h */
6255 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6256 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6257 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6258 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6259 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6260 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6261 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6262 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6263};
6264AssertCompile(RT_ELEMENTS(g_apfnVexGroup15RegReg) == 8*4);
6265
6266
6267/**
6268 * Vex group 15 jump table for memory variant.
6269 * @todo work in progress
6270 */
6271IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
6272{ /* pfx: none, 066h, 0f3h, 0f2h */
6273 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6274 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6275 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6276 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6277 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6278 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6279 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6280 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6281};
6282AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
6283
6284
6285/** Opcode vex. 0xae. */
6286FNIEMOP_DEF(iemOp_VGrp15)
6287{
6288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6289 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6290 /* register, register */
6291 return FNIEMOP_CALL_1(g_apfnVexGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6292 + pVCpu->iem.s.idxPrefix], bRm);
6293 /* memory, register */
6294 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6295 + pVCpu->iem.s.idxPrefix], bRm);
6296}
6297
6298
6299/** Opcode 0x0f 0xaf. */
6300FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6301{
6302 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6303 IEMOP_HLP_MIN_386();
6304 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6305 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6306}
6307
6308
6309/** Opcode 0x0f 0xb0. */
6310FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6311{
6312 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6313 IEMOP_HLP_MIN_486();
6314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6315
6316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6317 {
6318 IEMOP_HLP_DONE_DECODING();
6319 IEM_MC_BEGIN(4, 0);
6320 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6321 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6322 IEM_MC_ARG(uint8_t, u8Src, 2);
6323 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6324
6325 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6326 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6327 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6328 IEM_MC_REF_EFLAGS(pEFlags);
6329 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6330 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6331 else
6332 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6333
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 }
6337 else
6338 {
6339 IEM_MC_BEGIN(4, 3);
6340 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6341 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6342 IEM_MC_ARG(uint8_t, u8Src, 2);
6343 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6345 IEM_MC_LOCAL(uint8_t, u8Al);
6346
6347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6348 IEMOP_HLP_DONE_DECODING();
6349 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6350 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6351 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6352 IEM_MC_FETCH_EFLAGS(EFlags);
6353 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6356 else
6357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6358
6359 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6360 IEM_MC_COMMIT_EFLAGS(EFlags);
6361 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 }
6365 return VINF_SUCCESS;
6366}
6367
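/*
 * Illustration only (hypothetical helper): CMPXCHG compares the accumulator
 * with the destination; on a match it stores the source operand, otherwise it
 * loads the destination into the accumulator.  ZF reports the comparison:
 *
 * @code
 *  static bool iemSketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc)
 *  {
 *      if (*puDst == *puAl)
 *      {
 *          *puDst = uSrc;      // equal: write the source, ZF=1
 *          return true;
 *      }
 *      *puAl = *puDst;         // unequal: AL gets the old destination, ZF=0
 *      return false;
 *  }
 * @endcode
 */
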
6368/** Opcode 0x0f 0xb1. */
6369FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6370{
6371 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6372 IEMOP_HLP_MIN_486();
6373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6374
6375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6376 {
6377 IEMOP_HLP_DONE_DECODING();
6378 switch (pVCpu->iem.s.enmEffOpSize)
6379 {
6380 case IEMMODE_16BIT:
6381 IEM_MC_BEGIN(4, 0);
6382 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6383 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6384 IEM_MC_ARG(uint16_t, u16Src, 2);
6385 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6386
6387 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6388 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6389 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6390 IEM_MC_REF_EFLAGS(pEFlags);
6391 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6392 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6393 else
6394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6395
6396 IEM_MC_ADVANCE_RIP();
6397 IEM_MC_END();
6398 return VINF_SUCCESS;
6399
6400 case IEMMODE_32BIT:
6401 IEM_MC_BEGIN(4, 0);
6402 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6403 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6404 IEM_MC_ARG(uint32_t, u32Src, 2);
6405 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6406
6407 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6408 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6409 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6410 IEM_MC_REF_EFLAGS(pEFlags);
6411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6412 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6413 else
6414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6415
6416 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6417 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6418 IEM_MC_ADVANCE_RIP();
6419 IEM_MC_END();
6420 return VINF_SUCCESS;
6421
6422 case IEMMODE_64BIT:
6423 IEM_MC_BEGIN(4, 0);
6424 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6425 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6426#ifdef RT_ARCH_X86
6427 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6428#else
6429 IEM_MC_ARG(uint64_t, u64Src, 2);
6430#endif
6431 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6432
6433 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6434 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6435 IEM_MC_REF_EFLAGS(pEFlags);
6436#ifdef RT_ARCH_X86
6437 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6438 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6439 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6440 else
6441 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6442#else
6443 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6444 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6445 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6446 else
6447 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6448#endif
6449
6450 IEM_MC_ADVANCE_RIP();
6451 IEM_MC_END();
6452 return VINF_SUCCESS;
6453
6454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6455 }
6456 }
6457 else
6458 {
6459 switch (pVCpu->iem.s.enmEffOpSize)
6460 {
6461 case IEMMODE_16BIT:
6462 IEM_MC_BEGIN(4, 3);
6463 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6464 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6465 IEM_MC_ARG(uint16_t, u16Src, 2);
6466 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6468 IEM_MC_LOCAL(uint16_t, u16Ax);
6469
6470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6471 IEMOP_HLP_DONE_DECODING();
6472 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6473 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6474 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6475 IEM_MC_FETCH_EFLAGS(EFlags);
6476 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6477 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6478 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6479 else
6480 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6481
6482 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6483 IEM_MC_COMMIT_EFLAGS(EFlags);
6484 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6485 IEM_MC_ADVANCE_RIP();
6486 IEM_MC_END();
6487 return VINF_SUCCESS;
6488
6489 case IEMMODE_32BIT:
6490 IEM_MC_BEGIN(4, 3);
6491 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6492 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6493 IEM_MC_ARG(uint32_t, u32Src, 2);
6494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6496 IEM_MC_LOCAL(uint32_t, u32Eax);
6497
6498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6499 IEMOP_HLP_DONE_DECODING();
6500 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6501 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6502 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6503 IEM_MC_FETCH_EFLAGS(EFlags);
6504 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6505 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6507 else
6508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6509
6510 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6511 IEM_MC_COMMIT_EFLAGS(EFlags);
6512 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6513 IEM_MC_ADVANCE_RIP();
6514 IEM_MC_END();
6515 return VINF_SUCCESS;
6516
6517 case IEMMODE_64BIT:
6518 IEM_MC_BEGIN(4, 3);
6519 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6520 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6521#ifdef RT_ARCH_X86
6522 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6523#else
6524 IEM_MC_ARG(uint64_t, u64Src, 2);
6525#endif
6526 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6528 IEM_MC_LOCAL(uint64_t, u64Rax);
6529
6530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6531 IEMOP_HLP_DONE_DECODING();
6532 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6533 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6534 IEM_MC_FETCH_EFLAGS(EFlags);
6535 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6536#ifdef RT_ARCH_X86
6537 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6538 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6539 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6540 else
6541 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6542#else
6543 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6544 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6545 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6546 else
6547 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6548#endif
6549
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6551 IEM_MC_COMMIT_EFLAGS(EFlags);
6552 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556
6557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6558 }
6559 }
6560}
6561
6562
6563FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6564{
6565 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6566 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6567
6568 switch (pVCpu->iem.s.enmEffOpSize)
6569 {
6570 case IEMMODE_16BIT:
6571 IEM_MC_BEGIN(5, 1);
6572 IEM_MC_ARG(uint16_t, uSel, 0);
6573 IEM_MC_ARG(uint16_t, offSeg, 1);
6574 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6575 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6576 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6577 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6581 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6582 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 case IEMMODE_32BIT:
6587 IEM_MC_BEGIN(5, 1);
6588 IEM_MC_ARG(uint16_t, uSel, 0);
6589 IEM_MC_ARG(uint32_t, offSeg, 1);
6590 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6591 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6592 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6593 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6596 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6597 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6598 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6599 IEM_MC_END();
6600 return VINF_SUCCESS;
6601
6602 case IEMMODE_64BIT:
6603 IEM_MC_BEGIN(5, 1);
6604 IEM_MC_ARG(uint16_t, uSel, 0);
6605 IEM_MC_ARG(uint64_t, offSeg, 1);
6606 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6607 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6608 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6609 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6612 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
6613 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6614 else
6615 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6616 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6617 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6618 IEM_MC_END();
6619 return VINF_SUCCESS;
6620
6621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6622 }
6623}
6624
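/*
 * Illustration only: the Mp operand loaded above is a far pointer stored as
 * the offset followed by a 16-bit selector, so the selector sits at
 * displacement 2, 4 or 8 depending on the operand size - hence the
 * IEM_MC_FETCH_MEM_U16_DISP calls.  The 32-bit layout as a sketch:
 *
 * @code
 *  #pragma pack(1)
 *  typedef struct FARPTR32SKETCH   // illustrative only
 *  {
 *      uint32_t off;               // fetched into offSeg
 *      uint16_t sel;               // fetched into uSel at disp 4
 *  } FARPTR32SKETCH;
 *  #pragma pack()
 * @endcode
 */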
6625
6626/** Opcode 0x0f 0xb2. */
6627FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6628{
6629 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6630 IEMOP_HLP_MIN_386();
6631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6633 return IEMOP_RAISE_INVALID_OPCODE();
6634 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6635}
6636
6637
6638/** Opcode 0x0f 0xb3. */
6639FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6640{
6641 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6642 IEMOP_HLP_MIN_386();
6643 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6644}
6645
6646
6647/** Opcode 0x0f 0xb4. */
6648FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6649{
6650 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6651 IEMOP_HLP_MIN_386();
6652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6654 return IEMOP_RAISE_INVALID_OPCODE();
6655 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6656}
6657
6658
6659/** Opcode 0x0f 0xb5. */
6660FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6661{
6662 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6663 IEMOP_HLP_MIN_386();
6664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6666 return IEMOP_RAISE_INVALID_OPCODE();
6667 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6668}
6669
6670
6671/** Opcode 0x0f 0xb6. */
6672FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6673{
6674 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6675 IEMOP_HLP_MIN_386();
6676
6677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6678
6679 /*
6680 * If rm is denoting a register, no more instruction bytes.
6681 */
6682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6683 {
6684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6685 switch (pVCpu->iem.s.enmEffOpSize)
6686 {
6687 case IEMMODE_16BIT:
6688 IEM_MC_BEGIN(0, 1);
6689 IEM_MC_LOCAL(uint16_t, u16Value);
6690 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6691 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6692 IEM_MC_ADVANCE_RIP();
6693 IEM_MC_END();
6694 return VINF_SUCCESS;
6695
6696 case IEMMODE_32BIT:
6697 IEM_MC_BEGIN(0, 1);
6698 IEM_MC_LOCAL(uint32_t, u32Value);
6699 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6700 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6701 IEM_MC_ADVANCE_RIP();
6702 IEM_MC_END();
6703 return VINF_SUCCESS;
6704
6705 case IEMMODE_64BIT:
6706 IEM_MC_BEGIN(0, 1);
6707 IEM_MC_LOCAL(uint64_t, u64Value);
6708 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6709 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6710 IEM_MC_ADVANCE_RIP();
6711 IEM_MC_END();
6712 return VINF_SUCCESS;
6713
6714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6715 }
6716 }
6717 else
6718 {
6719 /*
6720 * We're loading a register from memory.
6721 */
6722 switch (pVCpu->iem.s.enmEffOpSize)
6723 {
6724 case IEMMODE_16BIT:
6725 IEM_MC_BEGIN(0, 2);
6726 IEM_MC_LOCAL(uint16_t, u16Value);
6727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6730 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6731 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6732 IEM_MC_ADVANCE_RIP();
6733 IEM_MC_END();
6734 return VINF_SUCCESS;
6735
6736 case IEMMODE_32BIT:
6737 IEM_MC_BEGIN(0, 2);
6738 IEM_MC_LOCAL(uint32_t, u32Value);
6739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6742 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6744 IEM_MC_ADVANCE_RIP();
6745 IEM_MC_END();
6746 return VINF_SUCCESS;
6747
6748 case IEMMODE_64BIT:
6749 IEM_MC_BEGIN(0, 2);
6750 IEM_MC_LOCAL(uint64_t, u64Value);
6751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6754 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6755 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6756 IEM_MC_ADVANCE_RIP();
6757 IEM_MC_END();
6758 return VINF_SUCCESS;
6759
6760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6761 }
6762 }
6763}
6764
6765
6766/** Opcode 0x0f 0xb7. */
6767FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6768{
6769 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6770 IEMOP_HLP_MIN_386();
6771
6772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6773
6774 /** @todo Not entirely sure how the operand size prefix is handled here,
6775 * assuming that it will be ignored. Would be nice to have a few
6776 * tests for this. */
6777 /*
6778 * If rm is denoting a register, no more instruction bytes.
6779 */
6780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6781 {
6782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6783 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6784 {
6785 IEM_MC_BEGIN(0, 1);
6786 IEM_MC_LOCAL(uint32_t, u32Value);
6787 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6788 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6789 IEM_MC_ADVANCE_RIP();
6790 IEM_MC_END();
6791 }
6792 else
6793 {
6794 IEM_MC_BEGIN(0, 1);
6795 IEM_MC_LOCAL(uint64_t, u64Value);
6796 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6797 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6798 IEM_MC_ADVANCE_RIP();
6799 IEM_MC_END();
6800 }
6801 }
6802 else
6803 {
6804 /*
6805 * We're loading a register from memory.
6806 */
6807 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6808 {
6809 IEM_MC_BEGIN(0, 2);
6810 IEM_MC_LOCAL(uint32_t, u32Value);
6811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6814 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6815 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6816 IEM_MC_ADVANCE_RIP();
6817 IEM_MC_END();
6818 }
6819 else
6820 {
6821 IEM_MC_BEGIN(0, 2);
6822 IEM_MC_LOCAL(uint64_t, u64Value);
6823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6826 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6827 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6828 IEM_MC_ADVANCE_RIP();
6829 IEM_MC_END();
6830 }
6831 }
6832 return VINF_SUCCESS;
6833}
6834
6835
6836/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6837FNIEMOP_UD_STUB(iemOp_jmpe);
6838/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6839FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6840
6841
6842/**
6843 * @opcode 0xb9
6844 * @opinvalid intel-modrm
6845 * @optest ->
6846 */
6847FNIEMOP_DEF(iemOp_Grp10)
6848{
6849 /*
6850 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the modr/m
6851 * byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6852 */
6853 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6854 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6855 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6856}
6857
6858
6859/** Opcode 0x0f 0xba. */
6860FNIEMOP_DEF(iemOp_Grp8)
6861{
6862 IEMOP_HLP_MIN_386();
6863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6864 PCIEMOPBINSIZES pImpl;
6865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6866 {
6867 case 0: case 1: case 2: case 3:
6868 /* Both AMD and Intel want full modr/m decoding and imm8. */
6869 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6870 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6871 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6872 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6873 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6874 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6875 }
6876 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6877
6878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6879 {
6880 /* register destination. */
6881 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6883
6884 switch (pVCpu->iem.s.enmEffOpSize)
6885 {
6886 case IEMMODE_16BIT:
6887 IEM_MC_BEGIN(3, 0);
6888 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6889 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6890 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6891
6892 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6893 IEM_MC_REF_EFLAGS(pEFlags);
6894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6895
6896 IEM_MC_ADVANCE_RIP();
6897 IEM_MC_END();
6898 return VINF_SUCCESS;
6899
6900 case IEMMODE_32BIT:
6901 IEM_MC_BEGIN(3, 0);
6902 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6903 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6904 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6905
6906 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6907 IEM_MC_REF_EFLAGS(pEFlags);
6908 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6909
6910 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6911 IEM_MC_ADVANCE_RIP();
6912 IEM_MC_END();
6913 return VINF_SUCCESS;
6914
6915 case IEMMODE_64BIT:
6916 IEM_MC_BEGIN(3, 0);
6917 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6918 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6919 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6920
6921 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6922 IEM_MC_REF_EFLAGS(pEFlags);
6923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6924
6925 IEM_MC_ADVANCE_RIP();
6926 IEM_MC_END();
6927 return VINF_SUCCESS;
6928
6929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6930 }
6931 }
6932 else
6933 {
6934 /* memory destination. */
6935
6936 uint32_t fAccess;
6937 if (pImpl->pfnLockedU16)
6938 fAccess = IEM_ACCESS_DATA_RW;
6939 else /* BT */
6940 fAccess = IEM_ACCESS_DATA_R;
6941
6942 /** @todo test negative bit offsets! */
6943 switch (pVCpu->iem.s.enmEffOpSize)
6944 {
6945 case IEMMODE_16BIT:
6946 IEM_MC_BEGIN(3, 1);
6947 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6948 IEM_MC_ARG(uint16_t, u16Src, 1);
6949 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6951
6952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6953 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6954 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6955 if (pImpl->pfnLockedU16)
6956 IEMOP_HLP_DONE_DECODING();
6957 else
6958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6959 IEM_MC_FETCH_EFLAGS(EFlags);
6960 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6961 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6963 else
6964 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6965 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6966
6967 IEM_MC_COMMIT_EFLAGS(EFlags);
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 return VINF_SUCCESS;
6971
6972 case IEMMODE_32BIT:
6973 IEM_MC_BEGIN(3, 1);
6974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6975 IEM_MC_ARG(uint32_t, u32Src, 1);
6976 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6978
6979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6980 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6981 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6982 if (pImpl->pfnLockedU16)
6983 IEMOP_HLP_DONE_DECODING();
6984 else
6985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6986 IEM_MC_FETCH_EFLAGS(EFlags);
6987 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6988 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6989 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6990 else
6991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6993
6994 IEM_MC_COMMIT_EFLAGS(EFlags);
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 return VINF_SUCCESS;
6998
6999 case IEMMODE_64BIT:
7000 IEM_MC_BEGIN(3, 1);
7001 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7002 IEM_MC_ARG(uint64_t, u64Src, 1);
7003 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7005
7006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7007 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7008 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7009 if (pImpl->pfnLockedU16)
7010 IEMOP_HLP_DONE_DECODING();
7011 else
7012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7013 IEM_MC_FETCH_EFLAGS(EFlags);
7014 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7015 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7017 else
7018 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7020
7021 IEM_MC_COMMIT_EFLAGS(EFlags);
7022 IEM_MC_ADVANCE_RIP();
7023 IEM_MC_END();
7024 return VINF_SUCCESS;
7025
7026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7027 }
7028 }
7029}
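
/*
 * A minimal illustrative sketch of the imm8 bit offset handling above,
 * assuming only <stdint.h>; iemSketchBtRegU32 is a hypothetical helper
 * and not part of IEM.  The immediate is masked to the operand width
 * (the 0x0f/0x1f/0x3f masks above), so it wraps rather than addressing
 * bits outside the operand.
 */
#if 0
static int iemSketchBtRegU32(uint32_t uDst, uint8_t u8Bit)
{
    /* bt r32, imm8: only the low five bits of the immediate are used. */
    return (int)((uDst >> (u8Bit & 0x1f)) & 1); /* the selected bit becomes CF */
}
#endif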
7030
7031
7032/** Opcode 0x0f 0xbb. */
7033FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7034{
7035 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7036 IEMOP_HLP_MIN_386();
7037 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7038}
7039
7040
7041/** Opcode 0x0f 0xbc. */
7042FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7043{
7044 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7045 IEMOP_HLP_MIN_386();
7046 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7047 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7048}
7049
7050
7051/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7052FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7053
7054
7055/** Opcode 0x0f 0xbd. */
7056FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7057{
7058 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7059 IEMOP_HLP_MIN_386();
7060 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7061 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7062}
7063
7064
7065/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7066FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7067
7068
7069/** Opcode 0x0f 0xbe. */
7070FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7071{
7072 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7073 IEMOP_HLP_MIN_386();
7074
7075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7076
7077 /*
7078 * If rm is denoting a register, no more instruction bytes.
7079 */
7080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7081 {
7082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7083 switch (pVCpu->iem.s.enmEffOpSize)
7084 {
7085 case IEMMODE_16BIT:
7086 IEM_MC_BEGIN(0, 1);
7087 IEM_MC_LOCAL(uint16_t, u16Value);
7088 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7089 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7090 IEM_MC_ADVANCE_RIP();
7091 IEM_MC_END();
7092 return VINF_SUCCESS;
7093
7094 case IEMMODE_32BIT:
7095 IEM_MC_BEGIN(0, 1);
7096 IEM_MC_LOCAL(uint32_t, u32Value);
7097 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7098 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7099 IEM_MC_ADVANCE_RIP();
7100 IEM_MC_END();
7101 return VINF_SUCCESS;
7102
7103 case IEMMODE_64BIT:
7104 IEM_MC_BEGIN(0, 1);
7105 IEM_MC_LOCAL(uint64_t, u64Value);
7106 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7107 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7108 IEM_MC_ADVANCE_RIP();
7109 IEM_MC_END();
7110 return VINF_SUCCESS;
7111
7112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7113 }
7114 }
7115 else
7116 {
7117 /*
7118 * We're loading a register from memory.
7119 */
7120 switch (pVCpu->iem.s.enmEffOpSize)
7121 {
7122 case IEMMODE_16BIT:
7123 IEM_MC_BEGIN(0, 2);
7124 IEM_MC_LOCAL(uint16_t, u16Value);
7125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7129 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7130 IEM_MC_ADVANCE_RIP();
7131 IEM_MC_END();
7132 return VINF_SUCCESS;
7133
7134 case IEMMODE_32BIT:
7135 IEM_MC_BEGIN(0, 2);
7136 IEM_MC_LOCAL(uint32_t, u32Value);
7137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7140 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7141 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7142 IEM_MC_ADVANCE_RIP();
7143 IEM_MC_END();
7144 return VINF_SUCCESS;
7145
7146 case IEMMODE_64BIT:
7147 IEM_MC_BEGIN(0, 2);
7148 IEM_MC_LOCAL(uint64_t, u64Value);
7149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7152 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7153 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7154 IEM_MC_ADVANCE_RIP();
7155 IEM_MC_END();
7156 return VINF_SUCCESS;
7157
7158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7159 }
7160 }
7161}
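
/*
 * A minimal illustrative sketch of the sign extension done by the
 * U8_SX_U32 fetches above, assuming only <stdint.h>;
 * iemSketchMovsxU8ToU32 is a hypothetical helper and not part of IEM.
 */
#if 0
static uint32_t iemSketchMovsxU8ToU32(uint8_t u8Src)
{
    /* Plain two's complement sign extension: 0x80 becomes 0xffffff80. */
    return (uint32_t)(int32_t)(int8_t)u8Src;
}
#endif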
7162
7163
7164/** Opcode 0x0f 0xbf. */
7165FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7166{
7167 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7168 IEMOP_HLP_MIN_386();
7169
7170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7171
7172 /** @todo Not entirely sure how the operand size prefix is handled here,
7173 * assuming that it will be ignored. Would be nice to have a few
7174      *          tests for this. */
7175 /*
7176 * If rm is denoting a register, no more instruction bytes.
7177 */
7178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7179 {
7180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7181 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7182 {
7183 IEM_MC_BEGIN(0, 1);
7184 IEM_MC_LOCAL(uint32_t, u32Value);
7185 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7186 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7187 IEM_MC_ADVANCE_RIP();
7188 IEM_MC_END();
7189 }
7190 else
7191 {
7192 IEM_MC_BEGIN(0, 1);
7193 IEM_MC_LOCAL(uint64_t, u64Value);
7194 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7195 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7196 IEM_MC_ADVANCE_RIP();
7197 IEM_MC_END();
7198 }
7199 }
7200 else
7201 {
7202 /*
7203 * We're loading a register from memory.
7204 */
7205 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7206 {
7207 IEM_MC_BEGIN(0, 2);
7208 IEM_MC_LOCAL(uint32_t, u32Value);
7209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7212 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7213 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7214 IEM_MC_ADVANCE_RIP();
7215 IEM_MC_END();
7216 }
7217 else
7218 {
7219 IEM_MC_BEGIN(0, 2);
7220 IEM_MC_LOCAL(uint64_t, u64Value);
7221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7224 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7225 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7226 IEM_MC_ADVANCE_RIP();
7227 IEM_MC_END();
7228 }
7229 }
7230 return VINF_SUCCESS;
7231}
7232
7233
7234/** Opcode 0x0f 0xc0. */
7235FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7236{
7237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7238 IEMOP_HLP_MIN_486();
7239 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7240
7241 /*
7242 * If rm is denoting a register, no more instruction bytes.
7243 */
7244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7245 {
7246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7247
7248 IEM_MC_BEGIN(3, 0);
7249 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7250 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7251 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7252
7253 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7254 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7255 IEM_MC_REF_EFLAGS(pEFlags);
7256 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7257
7258 IEM_MC_ADVANCE_RIP();
7259 IEM_MC_END();
7260 }
7261 else
7262 {
7263 /*
7264 * We're accessing memory.
7265 */
7266 IEM_MC_BEGIN(3, 3);
7267 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7268 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7269 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7270 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7272
7273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7274 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7275 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7276 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7277 IEM_MC_FETCH_EFLAGS(EFlags);
7278 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7279 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7280 else
7281 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7282
7283 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7284 IEM_MC_COMMIT_EFLAGS(EFlags);
7285 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7286 IEM_MC_ADVANCE_RIP();
7287 IEM_MC_END();
7288 return VINF_SUCCESS;
7289 }
7290 return VINF_SUCCESS;
7291}
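
/*
 * A minimal illustrative sketch of the xadd semantics implemented above,
 * assuming only <stdint.h>; iemSketchXaddU8 is a hypothetical helper and
 * not part of IEM.  The u8RegCopy local above serves the same purpose as
 * uOldDst here; flag updates are omitted.
 */
#if 0
static void iemSketchXaddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;  /* destination receives the sum */
    *puReg = uOldDst;           /* register receives the old destination */
}
#endif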
7292
7293
7294/** Opcode 0x0f 0xc1. */
7295FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7296{
7297 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7298 IEMOP_HLP_MIN_486();
7299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7300
7301 /*
7302 * If rm is denoting a register, no more instruction bytes.
7303 */
7304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7305 {
7306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7307
7308 switch (pVCpu->iem.s.enmEffOpSize)
7309 {
7310 case IEMMODE_16BIT:
7311 IEM_MC_BEGIN(3, 0);
7312 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7313 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7315
7316 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7317 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7318 IEM_MC_REF_EFLAGS(pEFlags);
7319 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7320
7321 IEM_MC_ADVANCE_RIP();
7322 IEM_MC_END();
7323 return VINF_SUCCESS;
7324
7325 case IEMMODE_32BIT:
7326 IEM_MC_BEGIN(3, 0);
7327 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7328 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7329 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7330
7331 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7332 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7333 IEM_MC_REF_EFLAGS(pEFlags);
7334 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7335
7336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7337 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7338 IEM_MC_ADVANCE_RIP();
7339 IEM_MC_END();
7340 return VINF_SUCCESS;
7341
7342 case IEMMODE_64BIT:
7343 IEM_MC_BEGIN(3, 0);
7344 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7345 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7346 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7347
7348 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7349 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7350 IEM_MC_REF_EFLAGS(pEFlags);
7351 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7352
7353 IEM_MC_ADVANCE_RIP();
7354 IEM_MC_END();
7355 return VINF_SUCCESS;
7356
7357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7358 }
7359 }
7360 else
7361 {
7362 /*
7363 * We're accessing memory.
7364 */
7365 switch (pVCpu->iem.s.enmEffOpSize)
7366 {
7367 case IEMMODE_16BIT:
7368 IEM_MC_BEGIN(3, 3);
7369 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7370 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7371 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7372 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7374
7375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7376 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7377 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7378 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7379 IEM_MC_FETCH_EFLAGS(EFlags);
7380 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7381 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7382 else
7383 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7384
7385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7386 IEM_MC_COMMIT_EFLAGS(EFlags);
7387 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7388 IEM_MC_ADVANCE_RIP();
7389 IEM_MC_END();
7390 return VINF_SUCCESS;
7391
7392 case IEMMODE_32BIT:
7393 IEM_MC_BEGIN(3, 3);
7394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7395 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7396 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7397 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7399
7400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7401 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7402 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7403 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7404 IEM_MC_FETCH_EFLAGS(EFlags);
7405 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7406 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7407 else
7408 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7409
7410 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7411 IEM_MC_COMMIT_EFLAGS(EFlags);
7412 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7413 IEM_MC_ADVANCE_RIP();
7414 IEM_MC_END();
7415 return VINF_SUCCESS;
7416
7417 case IEMMODE_64BIT:
7418 IEM_MC_BEGIN(3, 3);
7419 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7420 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7421 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7422 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7424
7425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7426 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7427 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7428 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7429 IEM_MC_FETCH_EFLAGS(EFlags);
7430 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7431 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7432 else
7433 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7434
7435 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7436 IEM_MC_COMMIT_EFLAGS(EFlags);
7437 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7443 }
7444 }
7445}
7446
7447
7448/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7449FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7450/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7451FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7452/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7453FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7454/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7455FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7456
7457
7458/** Opcode 0x0f 0xc3. */
7459FNIEMOP_DEF(iemOp_movnti_My_Gy)
7460{
7461 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7462
7463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7464
7465 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7466 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7467 {
7468 switch (pVCpu->iem.s.enmEffOpSize)
7469 {
7470 case IEMMODE_32BIT:
7471 IEM_MC_BEGIN(0, 2);
7472 IEM_MC_LOCAL(uint32_t, u32Value);
7473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7474
7475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7478 return IEMOP_RAISE_INVALID_OPCODE();
7479
7480 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7481 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7482 IEM_MC_ADVANCE_RIP();
7483 IEM_MC_END();
7484 break;
7485
7486 case IEMMODE_64BIT:
7487 IEM_MC_BEGIN(0, 2);
7488 IEM_MC_LOCAL(uint64_t, u64Value);
7489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7490
7491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7493 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7494 return IEMOP_RAISE_INVALID_OPCODE();
7495
7496 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7497 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7498 IEM_MC_ADVANCE_RIP();
7499 IEM_MC_END();
7500 break;
7501
7502 case IEMMODE_16BIT:
7503 /** @todo check this form. */
7504 return IEMOP_RAISE_INVALID_OPCODE();
7505 }
7506 }
7507 else
7508 return IEMOP_RAISE_INVALID_OPCODE();
7509 return VINF_SUCCESS;
7510}
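
/* The non-temporal hint of movnti has no architectural effect on the
   result, so the emulation above is a plain store that is merely gated
   on the SSE2 feature check. */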
7511/* Opcode 0x66 0x0f 0xc3 - invalid */
7512/* Opcode 0xf3 0x0f 0xc3 - invalid */
7513/* Opcode 0xf2 0x0f 0xc3 - invalid */
7514
7515/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7516FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7517/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7518FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7519/* Opcode 0xf3 0x0f 0xc4 - invalid */
7520/* Opcode 0xf2 0x0f 0xc4 - invalid */
7521
7522/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7523FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7524/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7525FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7526/* Opcode 0xf3 0x0f 0xc5 - invalid */
7527/* Opcode 0xf2 0x0f 0xc5 - invalid */
7528
7529/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7530FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7531/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7532FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7533/* Opcode 0xf3 0x0f 0xc6 - invalid */
7534/* Opcode 0xf2 0x0f 0xc6 - invalid */
7535
7536
7537/** Opcode 0x0f 0xc7 !11/1. */
7538FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7539{
7540 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7541
7542 IEM_MC_BEGIN(4, 3);
7543 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7544 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7545 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7546 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7547 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7548 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7550
7551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7552 IEMOP_HLP_DONE_DECODING();
7553 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7554
7555 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7556 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7557 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7558
7559 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7560 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7561 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7562
7563 IEM_MC_FETCH_EFLAGS(EFlags);
7564 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7566 else
7567 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7568
7569 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7570 IEM_MC_COMMIT_EFLAGS(EFlags);
7571 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7572 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7573 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7574 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7575 IEM_MC_ENDIF();
7576 IEM_MC_ADVANCE_RIP();
7577
7578 IEM_MC_END();
7579 return VINF_SUCCESS;
7580}
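
/*
 * A minimal illustrative sketch of the cmpxchg8b semantics implemented
 * above, assuming only <stdint.h>; iemSketchCmpXchg8b is a hypothetical
 * helper and not part of IEM.  The return value corresponds to ZF.
 */
#if 0
static int iemSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EdxEax, uint64_t u64EcxEbx)
{
    if (*pu64Mem == *pu64EdxEax)
    {
        *pu64Mem = u64EcxEbx;   /* match: store ECX:EBX into the operand */
        return 1;               /* ZF = 1 */
    }
    *pu64EdxEax = *pu64Mem;     /* no match: load the operand into EDX:EAX */
    return 0;                   /* ZF = 0 */
}
#endif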
7581
7582
7583/** Opcode REX.W 0x0f 0xc7 !11/1. */
7584FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7585{
7586 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7587 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7588 {
7589#if 0
7590 RT_NOREF(bRm);
7591 IEMOP_BITCH_ABOUT_STUB();
7592 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7593#else
7594 IEM_MC_BEGIN(4, 3);
7595 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7596 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7597 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7598 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7599 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7600 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7602
7603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7604 IEMOP_HLP_DONE_DECODING();
7605 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7606 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7607
7608 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7609 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7610 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7611
7612 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7613 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7614 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7615
7616 IEM_MC_FETCH_EFLAGS(EFlags);
7617# ifdef RT_ARCH_AMD64
7618 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7619 {
7620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7622 else
7623 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7624 }
7625 else
7626# endif
7627 {
7628 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7629                accesses that are not all atomic, which works fine in a UNI CPU guest
7630 configuration (ignoring DMA). If guest SMP is active we have no choice
7631 but to use a rendezvous callback here. Sigh. */
7632 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7633 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7634 else
7635 {
7636 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7637 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7638 }
7639 }
7640
7641 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7642 IEM_MC_COMMIT_EFLAGS(EFlags);
7643 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7644 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7645 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7646 IEM_MC_ENDIF();
7647 IEM_MC_ADVANCE_RIP();
7648
7649 IEM_MC_END();
7650 return VINF_SUCCESS;
7651#endif
7652 }
7653 Log(("cmpxchg16b -> #UD\n"));
7654 return IEMOP_RAISE_INVALID_OPCODE();
7655}
7656
7657FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7658{
7659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7660 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7661 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7662}
7663
7664/** Opcode 0x0f 0xc7 11/6. */
7665FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7666
7667/** Opcode 0x0f 0xc7 !11/6. */
7668FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7669
7670/** Opcode 0x66 0x0f 0xc7 !11/6. */
7671FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7672
7673/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7674FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7675
7676/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7677FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7678
7679/** Opcode 0x0f 0xc7 11/7. */
7680FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7681
7682
7683/**
7684 * Group 9 jump table for register variant.
7685 */
7686IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7687{ /* pfx: none, 066h, 0f3h, 0f2h */
7688 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7689 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7690 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7691 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7692 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7693 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7694 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7695 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7696};
7697AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7698
7699
7700/**
7701 * Group 9 jump table for memory variant.
7702 */
7703IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7704{ /* pfx: none, 066h, 0f3h, 0f2h */
7705 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7706 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7707 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7708 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7709 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7710 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7711 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7712 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7713};
7714AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7715
7716
7717/** Opcode 0x0f 0xc7. */
7718FNIEMOP_DEF(iemOp_Grp9)
7719{
7720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7722 /* register, register */
7723 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7724 + pVCpu->iem.s.idxPrefix], bRm);
7725 /* memory, register */
7726 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7727 + pVCpu->iem.s.idxPrefix], bRm);
7728}
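
/* The group 9 tables above are indexed as reg * 4 + idxPrefix, where
   idxPrefix is 0 for no prefix, 1 for 066h, 2 for 0f3h and 3 for 0f2h
   (matching the table column comments).  E.g. vmclear (066h 0f c7 /6)
   lands at 6 * 4 + 1 = 25 in g_apfnGroup9MemReg. */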
7729
7730
7731/**
7732 * Common 'bswap register' helper.
7733 */
7734FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7735{
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 switch (pVCpu->iem.s.enmEffOpSize)
7738 {
7739 case IEMMODE_16BIT:
7740 IEM_MC_BEGIN(1, 0);
7741 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7742 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7743 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7744 IEM_MC_ADVANCE_RIP();
7745 IEM_MC_END();
7746 return VINF_SUCCESS;
7747
7748 case IEMMODE_32BIT:
7749 IEM_MC_BEGIN(1, 0);
7750 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7751 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7752 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7753 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7754 IEM_MC_ADVANCE_RIP();
7755 IEM_MC_END();
7756 return VINF_SUCCESS;
7757
7758 case IEMMODE_64BIT:
7759 IEM_MC_BEGIN(1, 0);
7760 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7761 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7762 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766
7767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7768 }
7769}
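
/*
 * A minimal illustrative sketch of the 32-bit byte swap performed by the
 * assembly helper called above, assuming only <stdint.h>;
 * iemSketchBswapU32 is a hypothetical helper and not part of IEM.  The
 * 16-bit form of bswap is documented as undefined; note how the 16-bit
 * case above deliberately avoids clearing the high dword.
 */
#if 0
static uint32_t iemSketchBswapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif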
7770
7771
7772/** Opcode 0x0f 0xc8. */
7773FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7774{
7775 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7776     /* Note! The Intel manuals state that R8-R15 can be accessed by using a
7777              REX.X prefix, but it appears REX.B is actually the correct prefix.
7778              For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7779 IEMOP_HLP_MIN_486();
7780 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7781}
7782
7783
7784/** Opcode 0x0f 0xc9. */
7785FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7786{
7787 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7788 IEMOP_HLP_MIN_486();
7789 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7790}
7791
7792
7793/** Opcode 0x0f 0xca. */
7794FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7795{
7796     IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7797 IEMOP_HLP_MIN_486();
7798 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7799}
7800
7801
7802/** Opcode 0x0f 0xcb. */
7803FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7804{
7805     IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7806 IEMOP_HLP_MIN_486();
7807 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7808}
7809
7810
7811/** Opcode 0x0f 0xcc. */
7812FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7813{
7814 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7815 IEMOP_HLP_MIN_486();
7816 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7817}
7818
7819
7820/** Opcode 0x0f 0xcd. */
7821FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7822{
7823 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7824 IEMOP_HLP_MIN_486();
7825 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7826}
7827
7828
7829/** Opcode 0x0f 0xce. */
7830FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7831{
7832 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7833 IEMOP_HLP_MIN_486();
7834 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7835}
7836
7837
7838/** Opcode 0x0f 0xcf. */
7839FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7840{
7841 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7842 IEMOP_HLP_MIN_486();
7843 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7844}
7845
7846
7847/* Opcode 0x0f 0xd0 - invalid */
7848/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7849FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7850/* Opcode 0xf3 0x0f 0xd0 - invalid */
7851/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7852FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7853
7854/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7855FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7856/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7857FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7858/* Opcode 0xf3 0x0f 0xd1 - invalid */
7859/* Opcode 0xf2 0x0f 0xd1 - invalid */
7860
7861/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7862FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7863/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7864FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7865/* Opcode 0xf3 0x0f 0xd2 - invalid */
7866/* Opcode 0xf2 0x0f 0xd2 - invalid */
7867
7868/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7869FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7870/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7871FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7872/* Opcode 0xf3 0x0f 0xd3 - invalid */
7873/* Opcode 0xf2 0x0f 0xd3 - invalid */
7874
7875/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7876FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7877/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7878FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7879/* Opcode 0xf3 0x0f 0xd4 - invalid */
7880/* Opcode 0xf2 0x0f 0xd4 - invalid */
7881
7882/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7883FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7884/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7885FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7886/* Opcode 0xf3 0x0f 0xd5 - invalid */
7887/* Opcode 0xf2 0x0f 0xd5 - invalid */
7888
7889/* Opcode 0x0f 0xd6 - invalid */
7890
7891/**
7892 * @opcode 0xd6
7893 * @oppfx 0x66
7894 * @opcpuid sse2
7895 * @opgroup og_sse2_pcksclr_datamove
7896 * @opxcpttype none
7897 * @optest op1=-1 op2=2 -> op1=2
7898 * @optest op1=0 op2=-42 -> op1=-42
7899 */
7900FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
7901{
7902 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7905 {
7906 /*
7907 * Register, register.
7908 */
7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7910 IEM_MC_BEGIN(0, 2);
7911 IEM_MC_LOCAL(uint64_t, uSrc);
7912
7913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7915
7916 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7917 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7918
7919 IEM_MC_ADVANCE_RIP();
7920 IEM_MC_END();
7921 }
7922 else
7923 {
7924 /*
7925 * Memory, register.
7926 */
7927 IEM_MC_BEGIN(0, 2);
7928 IEM_MC_LOCAL(uint64_t, uSrc);
7929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7930
7931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7935
7936 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7937 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7938
7939 IEM_MC_ADVANCE_RIP();
7940 IEM_MC_END();
7941 }
7942 return VINF_SUCCESS;
7943}
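
/* Per the WqZxReg operand form in the mnemonic above, the register
   destination is zero-extended to the full 128 bits
   (IEM_MC_STORE_XREG_U64_ZX_U128), while the memory form stores just the
   low quadword. */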
7944
7945
7946/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7947FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7948/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7949FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7950#if 0
7951FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7952{
7953     /* The docs say register only. */
7954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7955
7956 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7957 {
7958 case IEM_OP_PRF_SIZE_OP: /* SSE */
7959             IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7960 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7961 IEM_MC_BEGIN(2, 0);
7962 IEM_MC_ARG(uint64_t *, pDst, 0);
7963 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7964 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7965 IEM_MC_PREPARE_SSE_USAGE();
7966 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7967 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7968 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7969 IEM_MC_ADVANCE_RIP();
7970 IEM_MC_END();
7971 return VINF_SUCCESS;
7972
7973 case 0: /* MMX */
7974             IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7975 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7976 IEM_MC_BEGIN(2, 0);
7977 IEM_MC_ARG(uint64_t *, pDst, 0);
7978 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7979 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7980 IEM_MC_PREPARE_FPU_USAGE();
7981 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7982 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7983 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7984 IEM_MC_ADVANCE_RIP();
7985 IEM_MC_END();
7986 return VINF_SUCCESS;
7987
7988 default:
7989 return IEMOP_RAISE_INVALID_OPCODE();
7990 }
7991}
7992#endif
7993
7994
7995/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7996FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7997{
7998     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7999     /** @todo testcase: Check that the instruction implicitly clears the high
8000      *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
8001      *        and opcode modifications are made to work with the whole width (not
8002      *        just 128). */
8003     IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8004     /* The docs say register only. */
8005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8006 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8007 {
8008 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8009 IEM_MC_BEGIN(2, 0);
8010 IEM_MC_ARG(uint64_t *, pDst, 0);
8011 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8012 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8013 IEM_MC_PREPARE_FPU_USAGE();
8014 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8015 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8016 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8017 IEM_MC_ADVANCE_RIP();
8018 IEM_MC_END();
8019 return VINF_SUCCESS;
8020 }
8021 return IEMOP_RAISE_INVALID_OPCODE();
8022}
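
/*
 * A minimal illustrative sketch of the pmovmskb operation implemented by
 * the helper called above, assuming only <stdint.h>;
 * iemSketchPMovMskBU64 is a hypothetical helper and not part of IEM.
 */
#if 0
static uint64_t iemSketchPMovMskBU64(uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uResult |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return uResult; /* the sign bit of each byte; bits 8 thru 63 are zero */
}
#endif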
8023
8024/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
8025FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
8026{
8027     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8028     /** @todo testcase: Check that the instruction implicitly clears the high
8029      *        bits in 64-bit mode.  REX.W only becomes necessary when VLMAX > 256
8030      *        and opcode modifications are made to work with the whole width (not
8031      *        just 128). */
8032     IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
8033     /* The docs say register only. */
8034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8036 {
8037 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8038 IEM_MC_BEGIN(2, 0);
8039 IEM_MC_ARG(uint64_t *, pDst, 0);
8040 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8041 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8042 IEM_MC_PREPARE_SSE_USAGE();
8043 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8044 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8045 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8046 IEM_MC_ADVANCE_RIP();
8047 IEM_MC_END();
8048 return VINF_SUCCESS;
8049 }
8050 return IEMOP_RAISE_INVALID_OPCODE();
8051}
8052
8053/* Opcode 0xf3 0x0f 0xd7 - invalid */
8054/* Opcode 0xf2 0x0f 0xd7 - invalid */
8055
8056
8057/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8058FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8059/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
8060FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
8061/* Opcode 0xf3 0x0f 0xd8 - invalid */
8062/* Opcode 0xf2 0x0f 0xd8 - invalid */
8063
8064/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8065FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8066/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
8067FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
8068/* Opcode 0xf3 0x0f 0xd9 - invalid */
8069/* Opcode 0xf2 0x0f 0xd9 - invalid */
8070
8071/** Opcode 0x0f 0xda - pminub Pq, Qq */
8072FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8073/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
8074FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
8075/* Opcode 0xf3 0x0f 0xda - invalid */
8076/* Opcode 0xf2 0x0f 0xda - invalid */
8077
8078/** Opcode 0x0f 0xdb - pand Pq, Qq */
8079FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8080/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
8081FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
8082/* Opcode 0xf3 0x0f 0xdb - invalid */
8083/* Opcode 0xf2 0x0f 0xdb - invalid */
8084
8085/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8086FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8087/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
8088FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
8089/* Opcode 0xf3 0x0f 0xdc - invalid */
8090/* Opcode 0xf2 0x0f 0xdc - invalid */
8091
8092/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8093FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8094/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
8095FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
8096/* Opcode 0xf3 0x0f 0xdd - invalid */
8097/* Opcode 0xf2 0x0f 0xdd - invalid */
8098
8099/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8100FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8101/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
8102FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
8103/* Opcode 0xf3 0x0f 0xde - invalid */
8104/* Opcode 0xf2 0x0f 0xde - invalid */
8105
8106/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8107FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8108/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
8109FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
8110/* Opcode 0xf3 0x0f 0xdf - invalid */
8111/* Opcode 0xf2 0x0f 0xdf - invalid */
8112
8113/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8114FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8115/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
8116FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
8117/* Opcode 0xf3 0x0f 0xe0 - invalid */
8118/* Opcode 0xf2 0x0f 0xe0 - invalid */
8119
8120/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8121FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8122/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
8123FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
8124/* Opcode 0xf3 0x0f 0xe1 - invalid */
8125/* Opcode 0xf2 0x0f 0xe1 - invalid */
8126
8127/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8128FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8129/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
8130FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
8131/* Opcode 0xf3 0x0f 0xe2 - invalid */
8132/* Opcode 0xf2 0x0f 0xe2 - invalid */
8133
8134/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8135FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8136/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
8137FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
8138/* Opcode 0xf3 0x0f 0xe3 - invalid */
8139/* Opcode 0xf2 0x0f 0xe3 - invalid */
8140
8141/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8142FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8143/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
8144FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
8145/* Opcode 0xf3 0x0f 0xe4 - invalid */
8146/* Opcode 0xf2 0x0f 0xe4 - invalid */
8147
8148/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8149FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8150/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
8151FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
8152/* Opcode 0xf3 0x0f 0xe5 - invalid */
8153/* Opcode 0xf2 0x0f 0xe5 - invalid */
8154
8155/* Opcode 0x0f 0xe6 - invalid */
8156/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
8157FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
8158/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
8159FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
8160/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
8161FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
8162
8163
8164/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8165FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8166{
8167 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8169 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8170 {
8171 /* Register, memory. */
8172 IEM_MC_BEGIN(0, 2);
8173 IEM_MC_LOCAL(uint64_t, uSrc);
8174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8175
8176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8179 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8180
8181 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8182 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8183
8184 IEM_MC_ADVANCE_RIP();
8185 IEM_MC_END();
8186 return VINF_SUCCESS;
8187 }
8188 /* The register, register encoding is invalid. */
8189 return IEMOP_RAISE_INVALID_OPCODE();
8190}
8191
8192/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
8193FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
8194{
8195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8196 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8197 {
8198 /* Register, memory. */
8199 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
8200 IEM_MC_BEGIN(0, 2);
8201 IEM_MC_LOCAL(RTUINT128U, uSrc);
8202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8203
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8206 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8207 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8208
8209 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8210 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8211
8212 IEM_MC_ADVANCE_RIP();
8213 IEM_MC_END();
8214 return VINF_SUCCESS;
8215 }
8216
8217 /* The register, register encoding is invalid. */
8218 return IEMOP_RAISE_INVALID_OPCODE();
8219}
8220
8221/* Opcode 0xf3 0x0f 0xe7 - invalid */
8222/* Opcode 0xf2 0x0f 0xe7 - invalid */
8223
8224
8225/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8226FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8227/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
8228FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
8229/* Opcode 0xf3 0x0f 0xe8 - invalid */
8230/* Opcode 0xf2 0x0f 0xe8 - invalid */
8231
8232/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8233FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8234/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
8235FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
8236/* Opcode 0xf3 0x0f 0xe9 - invalid */
8237/* Opcode 0xf2 0x0f 0xe9 - invalid */
8238
8239/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8240FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8241/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
8242FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
8243/* Opcode 0xf3 0x0f 0xea - invalid */
8244/* Opcode 0xf2 0x0f 0xea - invalid */
8245
8246/** Opcode 0x0f 0xeb - por Pq, Qq */
8247FNIEMOP_STUB(iemOp_por_Pq_Qq);
8248/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
8249FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
8250/* Opcode 0xf3 0x0f 0xeb - invalid */
8251/* Opcode 0xf2 0x0f 0xeb - invalid */
8252
8253/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8254FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8255/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
8256FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
8257/* Opcode 0xf3 0x0f 0xec - invalid */
8258/* Opcode 0xf2 0x0f 0xec - invalid */
8259
8260/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8261FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8262/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
8263FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
8264/* Opcode 0xf3 0x0f 0xed - invalid */
8265/* Opcode 0xf2 0x0f 0xed - invalid */
8266
8267/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8268FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8269/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
8270FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
8271/* Opcode 0xf3 0x0f 0xee - invalid */
8272/* Opcode 0xf2 0x0f 0xee - invalid */
8273
8274
8275/** Opcode 0x0f 0xef - pxor Pq, Qq */
8276FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8277{
8278 IEMOP_MNEMONIC(pxor, "pxor");
8279 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8280}
8281
8282/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
8283FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
8284{
8285 IEMOP_MNEMONIC(vpxor, "vpxor");
8286 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8287}
8288
8289/* Opcode 0xf3 0x0f 0xef - invalid */
8290/* Opcode 0xf2 0x0f 0xef - invalid */
8291
8292/* Opcode 0x0f 0xf0 - invalid */
8293/* Opcode 0x66 0x0f 0xf0 - invalid */
8294/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
8295FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
8296
8297/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8298FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8299/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
8300FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
8301/* Opcode 0xf2 0x0f 0xf1 - invalid */
8302
8303/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8304FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8305/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
8306FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
8307/* Opcode 0xf2 0x0f 0xf2 - invalid */
8308
8309/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8310FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8311/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
8312FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
8313/* Opcode 0xf2 0x0f 0xf3 - invalid */
8314
8315/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8316FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8317/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
8318FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
8319/* Opcode 0xf2 0x0f 0xf4 - invalid */
8320
8321/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8322FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8323/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
8324FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
8325/* Opcode 0xf2 0x0f 0xf5 - invalid */
8326
8327/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8328FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8329/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
8330FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
8331/* Opcode 0xf2 0x0f 0xf6 - invalid */
8332
8333/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8334FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8335/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
8336FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
8337/* Opcode 0xf2 0x0f 0xf7 - invalid */
8338
8339/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8340FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8341/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
8342FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
8343/* Opcode 0xf2 0x0f 0xf8 - invalid */
8344
8345/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8346FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8347/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
8348FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
8349/* Opcode 0xf2 0x0f 0xf9 - invalid */
8350
8351/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8352FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8353/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
8354FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
8355/* Opcode 0xf2 0x0f 0xfa - invalid */
8356
8357/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8358FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8359/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
8360FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
8361/* Opcode 0xf2 0x0f 0xfb - invalid */
8362
8363/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8364FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8365/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
8366FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
8367/* Opcode 0xf2 0x0f 0xfc - invalid */
8368
8369/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8370FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8371/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
8372FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
8373/* Opcode 0xf2 0x0f 0xfd - invalid */
8374
8375/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8376FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8377/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
8378FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
8379/* Opcode 0xf2 0x0f 0xfe - invalid */
8380
8381
8382/** Opcode **** 0x0f 0xff - UD0 */
8383FNIEMOP_DEF(iemOp_ud0)
8384{
8385 IEMOP_MNEMONIC(ud0, "ud0");
8386 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8387 {
8388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8389#ifndef TST_IEM_CHECK_MC
8390 RTGCPTR GCPtrEff;
8391 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8392 if (rcStrict != VINF_SUCCESS)
8393 return rcStrict;
8394#endif
8395 IEMOP_HLP_DONE_DECODING();
8396 }
8397 return IEMOP_RAISE_INVALID_OPCODE();
8398}
8399
8400
8401
8402/**
8403 * Two byte opcode map, first byte 0x0f.
8404 *
8405 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8406 * check if it needs updating as well when making changes.
8407 */
8408IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8409{
8410    /*          no prefix,          066h prefix,        f3h prefix,         f2h prefix */
8411 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8412 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8413 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8414 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8415 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8416 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8417 /* 0x06 */ IEMOP_X4(iemOp_clts),
8418 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8419 /* 0x08 */ IEMOP_X4(iemOp_invd),
8420 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8421 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8422 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8423 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8424 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8425 /* 0x0e */ IEMOP_X4(iemOp_femms),
8426 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8427
8428 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8429 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8430 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8431 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8434 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8435 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8436 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8437 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8438 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8439 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8440 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8441 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8442 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8443 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8444
8445 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8446 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8447 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8448 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8449 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8450 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8451 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8452 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8453 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8455 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8456 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8457 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8458 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8459 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8460 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8461
8462 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8463 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8464 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8465 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8466 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8467 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8468 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8469 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8470 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8471 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8472 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8473 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8474 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8475 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8476 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8477 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8478
8479 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8480 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8481 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8482 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8483 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8484 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8485 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8486 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8487 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8488 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8489 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8490 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8491 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8492 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8493 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8494 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8495
8496 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8497 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8498 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8499 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8500 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8501 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8502 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8503 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8504 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8505 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8506 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8507 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8508 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8509 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8510 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8511 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8512
8513 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8514 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8515 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8516 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8517 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8518 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8519 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8520 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8521 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8522 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8523 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8524 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8525 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8526 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8527 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8528 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8529
8530 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8531 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8532 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8533 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8534 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8535 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8536 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8537 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8538
8539 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8540 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8541 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8542 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8543 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8544 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8545 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8546 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8547
8548 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8549 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8550 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8551 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8552 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8553 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8554 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8555 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8556 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8557 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8558 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8559 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8560 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8561 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8562 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8563 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8564
8565 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8566 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8567 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8568 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8569 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8570 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8571 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8572 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8573 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8574 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8575 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8576 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8577 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8578 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8579 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8580 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8581
8582 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8583 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8584 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8585 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8586 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8587 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8588 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8589 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8590 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8591 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8592 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8593 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8594 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8595 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8596 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8597 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8598
8599 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8600 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8601 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8602 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8603 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8604 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8605 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8606 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8607 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8608 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8609 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8610 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8611 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8612 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8613 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8614 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8615
8616 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8617 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8618 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8619 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8620 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8621 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8622 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8623 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8624 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8625 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8626 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8627 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8628 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8629 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8630 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8631 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8632
8633 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8634 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8635 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8636 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8637 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8638 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8639 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8640 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8641 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8642 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8643 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8644 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8645 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8646 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8647 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8648 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8649
8650 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8651 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8652 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8653 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8654 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8655 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8656 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8657 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8658 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8659 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8660 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8661 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8662 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8663 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8664 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8665 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8666
8667 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8668 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8669 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8670 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8671 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8672 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8673 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8674 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8675 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8676 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8677 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8678 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8679 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8680 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8681 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8682 /* 0xff */ IEMOP_X4(iemOp_ud0),
8683};
8684AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
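
/*
 * Lookup sketch: each opcode byte owns four consecutive entries in the map
 * above, one per mandatory-prefix column (0 = none, 1 = 0x66, 2 = 0xF3,
 * 3 = 0xF2), which is what the AssertCompile's 256 * 4 = 1024 captures.
 * A minimal dispatch would look roughly like the lines below; this is an
 * illustration only, not the decoder's actual code, and it assumes the
 * prefix-decoding loop has already recorded the last SIMD prefix seen in
 * pVCpu->iem.s.idxPrefix:
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 */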


/**
 * VEX opcode map \#1.
 *
 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check
 *          if it needs updating too when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
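
/*
 * Column selection sketch: the VEX.pp field of the final VEX prefix byte
 * maps directly onto the four columns of this table (0 = none, 1 = 0x66,
 * 2 = 0xF3, 3 = 0xF2).  A 2-byte VEX prefix (0xC5) always implies map #1,
 * while the 3-byte form (0xC4) selects it with mmmmm = 1.  Roughly, and
 * purely as an illustration of the encoding (not code lifted from the
 * decoder), the 0xC5 case would go:
 *
 *      uint8_t bLastVex; IEM_OPCODE_GET_NEXT_U8(&bLastVex);  // the single C5 payload byte
 *      uint8_t bOpcode;  IEM_OPCODE_GET_NEXT_U8(&bOpcode);
 *      return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + (bLastVex & 0x3)]);
 */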
/** @} */