VirtualBox source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ r66469

Last change on this file was r66469, checked in by vboxsync, 8 years ago: IEM: More VEX work. Fixed punpcklbw_Vx_Wx.

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66469 2017-04-07 09:32:59Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

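/*
 * A note on the decoding pattern used throughout this file: the handlers
 * pick the standard x86 ModR/M fields out of bRm using the X86_MODRM_*
 * masks.  Roughly (for illustration only; the real definitions live in
 * the x86 header):
 *
 * @code
 *      // mod(7:6) reg(5:3) rm(2:0)
 *      uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // opcode extension or register
 *      uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;
 *      bool const    fReg = (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT); // mod == 11b
 * @endcode
 *
 * With mod == 11b the r/m field names a register (extended to 16 registers
 * by REX.B via uRexB); any other mod value selects a memory addressing form
 * which IEM_MC_CALC_RM_EFF_ADDR resolves into an effective address.
 */
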
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
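
/*
 * Example, for illustration only: "sldt ax" encodes as 0f 00 c0.  The
 * ModR/M byte 0xc0 has reg=0, so the table above dispatches to
 * iemOp_Grp6_sldt with bRm=0xc0 (mod=11b, rm=0, i.e. the AX register).
 */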


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
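
/*
 * The 0xffe0/0xfff0 ORs in smsw above model how older CPUs read back the
 * undefined high MSW bits as ones: a 286 sets bits 4..15 (0xfff0; it has
 * no ET bit), a 386 sets bits 5..15 (0xffe0), while later CPUs just return
 * the low word of CR0 unmodified.
 */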


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
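
/*
 * Note on the dispatcher above: once mod is 11b there is no memory operand,
 * so the r/m bits get reused as a second opcode extension.  E.g. 0f 01 f8
 * (bRm=0xf8: mod=11b, reg=7, rm=0) is swapgs, while every memory form of /7
 * is invlpg.
 */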

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
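
/*
 * Note that the 32-bit and 64-bit operand size cases above share one code
 * path: the destination is referenced as a uint64_t either way, and
 * iemCImpl_LarLsl_u64 is presumably the place where the 32-bit variant's
 * truncation/clearing of the upper half is taken care of.
 */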



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - movups Vps, Wps */
FNIEMOP_STUB(iemOp_movups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - movupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
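
/*
 * Worth noting about movss above: the register-to-register form merges,
 * writing only the low dword and leaving bits 32..127 of the destination
 * untouched (IEM_MC_STORE_XREG_U32), whereas the memory-source form
 * zero-extends the loaded dword to the full 128-bit register
 * (IEM_MC_STORE_XREG_U32_ZX_U128), matching the documented SSE behaviour.
 */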


/** Opcode 0xf3 0x0f 0x10 - movss Vx, Wss */
FNIEMOP_STUB(iemOp_movss_Vx_Wss);

/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
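
/*
 * Unlike movsldup above, which fetches an aligned 16-byte source
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE), movddup only reads a qword
 * (IEM_MC_FETCH_MEM_U64) and therefore skips the 16-byte alignment check;
 * hence the differing @opxcpttype values (4 vs 5) in the docs above.
 */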


/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
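
/*
 * Illustration of the lock prefix handling above: on CPUs with the
 * alternate CR8 encoding (fMovCr8In32Bit, e.g. AMD CPUs in 32-bit mode),
 * "lock mov eax, cr0" (f0 0f 20 c0) is decoded as "mov eax, cr8" instead
 * of faulting on the lock prefix.
 */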


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - movaps Vps, Wps */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - movaps Wps, Vps */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1978
1979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1981 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1982 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1983
1984 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1985 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1986
1987 IEM_MC_ADVANCE_RIP();
1988 IEM_MC_END();
1989 }
1990 return VINF_SUCCESS;
1991}
1992
1993/* Opcode 0xf3 0x0f 0x29 - invalid */
1994/* Opcode 0xf2 0x0f 0x29 - invalid */
1995
1996
1997/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1998FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1999/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2000FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2001 /** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2002FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2003 /** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2004FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2005
2006
2007 /** Opcode 0x0f 0x2b - movntps Mps, Vps */
2008FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2009{
2010 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2012 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2013 {
2014 /*
2015 * memory, register.
2016 */
2017 IEM_MC_BEGIN(0, 2);
2018 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2020
2021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2023 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2024 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2025
2026 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2027 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2028
2029 IEM_MC_ADVANCE_RIP();
2030 IEM_MC_END();
2031 }
2032 /* The register, register encoding is invalid. */
2033 else
2034 return IEMOP_RAISE_INVALID_OPCODE();
2035 return VINF_SUCCESS;
2036}
2037
2038/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2039FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2040{
2041 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
2042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2043 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2044 {
2045 /*
2046 * memory, register.
2047 */
2048 IEM_MC_BEGIN(0, 2);
2049 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2051
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2056
2057 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2058 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2059
2060 IEM_MC_ADVANCE_RIP();
2061 IEM_MC_END();
2062 }
2063 /* The register, register encoding is invalid. */
2064 else
2065 return IEMOP_RAISE_INVALID_OPCODE();
2066 return VINF_SUCCESS;
2067}
2068/* Opcode 0xf3 0x0f 0x2b - invalid */
2069/* Opcode 0xf2 0x0f 0x2b - invalid */
2070
2071
2072/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2073FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2074/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2075FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2076/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2077FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2078/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2079FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2080
2081/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2082FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2083/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2084FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2085/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2086FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2087/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2088FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2089
2090/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2091FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2092/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2093FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2094/* Opcode 0xf3 0x0f 0x2e - invalid */
2095/* Opcode 0xf2 0x0f 0x2e - invalid */
2096
2097/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2098FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2099/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2100FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2101/* Opcode 0xf3 0x0f 0x2f - invalid */
2102/* Opcode 0xf2 0x0f 0x2f - invalid */
2103
2104/** Opcode 0x0f 0x30. */
2105FNIEMOP_DEF(iemOp_wrmsr)
2106{
2107 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2109 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2110}
2111
2112
2113/** Opcode 0x0f 0x31. */
2114FNIEMOP_DEF(iemOp_rdtsc)
2115{
2116 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2119}
2120
2121
2122 /** Opcode 0x0f 0x32. */
2123FNIEMOP_DEF(iemOp_rdmsr)
2124{
2125 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2127 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2128}
2129
2130
2131 /** Opcode 0x0f 0x33. */
2132FNIEMOP_STUB(iemOp_rdpmc);
2133/** Opcode 0x0f 0x34. */
2134FNIEMOP_STUB(iemOp_sysenter);
2135/** Opcode 0x0f 0x35. */
2136FNIEMOP_STUB(iemOp_sysexit);
2137/** Opcode 0x0f 0x37. */
2138FNIEMOP_STUB(iemOp_getsec);
2139/** Opcode 0x0f 0x38. */
2140FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2141/** Opcode 0x0f 0x3a. */
2142FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2143
2144
2145/**
2146 * Implements a conditional move.
2147 *
2148 * Wish there was an obvious way to do this where we could share and reduce
2149 * code bloat.
2150 *
2151 * @param a_Cnd The conditional "microcode" operation.
2152 */
2153#define CMOV_X(a_Cnd) \
2154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2156 { \
2157 switch (pVCpu->iem.s.enmEffOpSize) \
2158 { \
2159 case IEMMODE_16BIT: \
2160 IEM_MC_BEGIN(0, 1); \
2161 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2162 a_Cnd { \
2163 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2164 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2165 } IEM_MC_ENDIF(); \
2166 IEM_MC_ADVANCE_RIP(); \
2167 IEM_MC_END(); \
2168 return VINF_SUCCESS; \
2169 \
2170 case IEMMODE_32BIT: \
2171 IEM_MC_BEGIN(0, 1); \
2172 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2173 a_Cnd { \
2174 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2175 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2176 } IEM_MC_ELSE() { \
2177 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2178 } IEM_MC_ENDIF(); \
2179 IEM_MC_ADVANCE_RIP(); \
2180 IEM_MC_END(); \
2181 return VINF_SUCCESS; \
2182 \
2183 case IEMMODE_64BIT: \
2184 IEM_MC_BEGIN(0, 1); \
2185 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2186 a_Cnd { \
2187 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2188 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2189 } IEM_MC_ENDIF(); \
2190 IEM_MC_ADVANCE_RIP(); \
2191 IEM_MC_END(); \
2192 return VINF_SUCCESS; \
2193 \
2194 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2195 } \
2196 } \
2197 else \
2198 { \
2199 switch (pVCpu->iem.s.enmEffOpSize) \
2200 { \
2201 case IEMMODE_16BIT: \
2202 IEM_MC_BEGIN(0, 2); \
2203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2204 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2206 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2207 a_Cnd { \
2208 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2209 } IEM_MC_ENDIF(); \
2210 IEM_MC_ADVANCE_RIP(); \
2211 IEM_MC_END(); \
2212 return VINF_SUCCESS; \
2213 \
2214 case IEMMODE_32BIT: \
2215 IEM_MC_BEGIN(0, 2); \
2216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2217 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2219 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2220 a_Cnd { \
2221 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2222 } IEM_MC_ELSE() { \
2223 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2224 } IEM_MC_ENDIF(); \
2225 IEM_MC_ADVANCE_RIP(); \
2226 IEM_MC_END(); \
2227 return VINF_SUCCESS; \
2228 \
2229 case IEMMODE_64BIT: \
2230 IEM_MC_BEGIN(0, 2); \
2231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2232 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2234 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2235 a_Cnd { \
2236 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2237 } IEM_MC_ENDIF(); \
2238 IEM_MC_ADVANCE_RIP(); \
2239 IEM_MC_END(); \
2240 return VINF_SUCCESS; \
2241 \
2242 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2243 } \
2244 } do {} while (0)
2245
2246
2247
2248/** Opcode 0x0f 0x40. */
2249FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2250{
2251 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2252 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2253}
2254
2255
2256/** Opcode 0x0f 0x41. */
2257FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2258{
2259 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2260 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2261}
2262
2263
2264/** Opcode 0x0f 0x42. */
2265FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2266{
2267 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2268 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2269}
2270
2271
2272/** Opcode 0x0f 0x43. */
2273FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2274{
2275 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2276 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2277}
2278
2279
2280/** Opcode 0x0f 0x44. */
2281FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2282{
2283 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2284 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2285}
2286
2287
2288/** Opcode 0x0f 0x45. */
2289FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2290{
2291 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2292 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2293}
2294
2295
2296/** Opcode 0x0f 0x46. */
2297FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2298{
2299 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2300 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2301}
2302
2303
2304/** Opcode 0x0f 0x47. */
2305FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2306{
2307 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2308 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2309}
2310
2311
2312/** Opcode 0x0f 0x48. */
2313FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2314{
2315 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2316 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2317}
2318
2319
2320/** Opcode 0x0f 0x49. */
2321FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2322{
2323 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2324 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2325}
2326
2327
2328/** Opcode 0x0f 0x4a. */
2329FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2330{
2331 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2332 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2333}
2334
2335
2336/** Opcode 0x0f 0x4b. */
2337FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2338{
2339 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2340 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2341}
2342
2343
2344/** Opcode 0x0f 0x4c. */
2345FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2346{
2347 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2348 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2349}
2350
2351
2352/** Opcode 0x0f 0x4d. */
2353FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2354{
2355 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2356 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2357}
2358
2359
2360/** Opcode 0x0f 0x4e. */
2361FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2362{
2363 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2364 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2365}
2366
2367
2368/** Opcode 0x0f 0x4f. */
2369FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2370{
2371 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2372 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2373}
2374
2375#undef CMOV_X
2376
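/*
 * Illustrative sketch (not part of IEM): the reference semantics the CMOV_X
 * expansions above implement for the 32-bit case.  Note the
 * IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE branches: in 64-bit mode a 32-bit
 * CMOVcc zero-extends the destination even when the condition is false.
 */
#if 0
# include <stdint.h>
static uint64_t cmov32Ref(uint64_t uDst, uint32_t uSrc, int fCondition)
{
    /* Either way the high 32 bits of the destination end up zero. */
    return fCondition ? uSrc : (uint32_t)uDst;
}
#endif
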
2377/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2378FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2379/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2380FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2381/* Opcode 0xf3 0x0f 0x50 - invalid */
2382/* Opcode 0xf2 0x0f 0x50 - invalid */
2383
2384/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2385FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2386/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2387FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2388/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2389FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2390/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2391FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2392
2393/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2394FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2395/* Opcode 0x66 0x0f 0x52 - invalid */
2396/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2397FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2398/* Opcode 0xf2 0x0f 0x52 - invalid */
2399
2400/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2401FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2402/* Opcode 0x66 0x0f 0x53 - invalid */
2403/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2404FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2405/* Opcode 0xf2 0x0f 0x53 - invalid */
2406
2407/** Opcode 0x0f 0x54 - andps Vps, Wps */
2408FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2409/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2410FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2411/* Opcode 0xf3 0x0f 0x54 - invalid */
2412/* Opcode 0xf2 0x0f 0x54 - invalid */
2413
2414/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2415FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2416/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2417FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2418/* Opcode 0xf3 0x0f 0x55 - invalid */
2419/* Opcode 0xf2 0x0f 0x55 - invalid */
2420
2421/** Opcode 0x0f 0x56 - orps Vps, Wps */
2422FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2423/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2424FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2425/* Opcode 0xf3 0x0f 0x56 - invalid */
2426/* Opcode 0xf2 0x0f 0x56 - invalid */
2427
2428/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2429FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2430/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2431FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2432/* Opcode 0xf3 0x0f 0x57 - invalid */
2433/* Opcode 0xf2 0x0f 0x57 - invalid */
2434
2435/** Opcode 0x0f 0x58 - addps Vps, Wps */
2436FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2437/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2438FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2439/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2440FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2441/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2442FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2443
2444/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2445FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2446/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2447FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2448/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2449FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2450/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2451FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2452
2453/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2454FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2455/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2456FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2457/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2458FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2459/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2460FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2461
2462/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2463FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2464/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2465FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2466/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2467FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2468/* Opcode 0xf2 0x0f 0x5b - invalid */
2469
2470/** Opcode 0x0f 0x5c - subps Vps, Wps */
2471FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2472/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2473FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2474/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2475FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2476/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2477FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2478
2479/** Opcode 0x0f 0x5d - minps Vps, Wps */
2480FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2481/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2482FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2483/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2484FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2485/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2486FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2487
2488/** Opcode 0x0f 0x5e - divps Vps, Wps */
2489FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2490/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2491FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2492/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2493FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2494/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2495FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2496
2497/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2498FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2499/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2500FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2501/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2502FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2503/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2504FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2505
2506 /**
2507  * Common worker for SSE2 instructions on the forms:
2508  *     pxxxx    xmm1, xmm2/mem128
2509  *
2510  * The 2nd operand is the first half (low quadword) of a register, which in
2511  * the memory case means a 64-bit fetch that must be 128-bit aligned; only
2512  * the low 64 bits of the source are used.
2513  *
2514  * Exceptions type 4.
2515  */
2516 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2517{
2518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2520 {
2521 /*
2522 * Register, register.
2523 */
2524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2525 IEM_MC_BEGIN(2, 0);
2526 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2527 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2528 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2529 IEM_MC_PREPARE_SSE_USAGE();
2530 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2531 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2532 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2533 IEM_MC_ADVANCE_RIP();
2534 IEM_MC_END();
2535 }
2536 else
2537 {
2538 /*
2539 * Register, memory.
2540 */
2541 IEM_MC_BEGIN(2, 2);
2542 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2543 IEM_MC_LOCAL(uint64_t, uSrc);
2544 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2546
2547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2550 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2551
2552 IEM_MC_PREPARE_SSE_USAGE();
2553 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2554 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2555
2556 IEM_MC_ADVANCE_RIP();
2557 IEM_MC_END();
2558 }
2559 return VINF_SUCCESS;
2560}
2561
2562
2563 /**
2564  * Common worker for MMX instructions on the forms:
2565  *     pxxxx    mm1, mm2/mem32
2566  *
2567  * The 2nd operand is the first half (low doubleword) of a register, which
2568  * in the memory case means a 32-bit memory access.  Operations without an
2569  * MMX variant (pfnU64 == NULL) raise #UD.
2570  *
2571  * Exceptions type 4.
2572  */
2573 FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2574{
2575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2576 if (!pImpl->pfnU64)
2577 return IEMOP_RAISE_INVALID_OPCODE();
2578 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2579 {
2580 /*
2581 * Register, register.
2582 */
2583 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2584 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2586 IEM_MC_BEGIN(2, 0);
2587 IEM_MC_ARG(uint64_t *, pDst, 0);
2588 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2589 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2590 IEM_MC_PREPARE_FPU_USAGE();
2591 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2592 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2593 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2594 IEM_MC_ADVANCE_RIP();
2595 IEM_MC_END();
2596 }
2597 else
2598 {
2599 /*
2600 * Register, memory.
2601 */
2602 IEM_MC_BEGIN(2, 2);
2603 IEM_MC_ARG(uint64_t *, pDst, 0);
2604 IEM_MC_LOCAL(uint32_t, uSrc);
2605 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2607
2608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2610 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2611 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2612
2613 IEM_MC_PREPARE_FPU_USAGE();
2614 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2615 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2616
2617 IEM_MC_ADVANCE_RIP();
2618 IEM_MC_END();
2619 }
2620 return VINF_SUCCESS;
2621}
2622
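/*
 * Illustrative sketch (not part of IEM): what a "LowLow_To_Full" operation
 * such as PUNPCKLBW computes for the 64-bit MMX case -- the low four bytes
 * of each operand are interleaved into a full 64-bit result.
 */
#if 0
# include <stdint.h>
static uint64_t punpcklbwU64Ref(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & UINT64_C(0xff)) << (i * 16);     /* even result bytes from dst */
        uResult |= ((uSrc >> (i * 8)) & UINT64_C(0xff)) << (i * 16 + 8); /* odd result bytes from src */
    }
    return uResult;
}
#endif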
2623
2624/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2625FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2626{
2627 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2628 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2629}
2630
2631 /** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2632 FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2633 {
2634     IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2635 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2636}
2637
2638/* Opcode 0xf3 0x0f 0x60 - invalid */
2639
2640
2641/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2642FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2643{
2644 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2645 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2646}
2647
2648/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2649FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2650{
2651     IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2652 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2653}
2654
2655/* Opcode 0xf3 0x0f 0x61 - invalid */
2656
2657
2658/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2659FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2660{
2661 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2662 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2663}
2664
2665/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2666FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2667{
2668 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2669 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2670}
2671
2672/* Opcode 0xf3 0x0f 0x62 - invalid */
2673
2674
2675
2676/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2677FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2678/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2679FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2680/* Opcode 0xf3 0x0f 0x63 - invalid */
2681
2682/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2683FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2684/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2685FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2686/* Opcode 0xf3 0x0f 0x64 - invalid */
2687
2688/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2689FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2690/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2691FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2692/* Opcode 0xf3 0x0f 0x65 - invalid */
2693
2694/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2695FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2696/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2697FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2698/* Opcode 0xf3 0x0f 0x66 - invalid */
2699
2700/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2701FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2702/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
2703FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2704/* Opcode 0xf3 0x0f 0x67 - invalid */
2705
2706
2707/**
2708 * Common worker for MMX instructions on the form:
2709 * pxxxx mm1, mm2/mem64
2710 *
2711 * The 2nd operand is the second half of a register, which in the memory case
2712 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2713 * where it may read the full 128 bits or only the upper 64 bits.
2714 *
2715 * Exceptions type 4.
2716 */
2717FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2718{
2719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2720 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2722 {
2723 /*
2724 * Register, register.
2725 */
2726 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2727 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_BEGIN(2, 0);
2730 IEM_MC_ARG(uint64_t *, pDst, 0);
2731 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2732 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2733 IEM_MC_PREPARE_FPU_USAGE();
2734 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2735 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2736 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2737 IEM_MC_ADVANCE_RIP();
2738 IEM_MC_END();
2739 }
2740 else
2741 {
2742 /*
2743 * Register, memory.
2744 */
2745 IEM_MC_BEGIN(2, 2);
2746 IEM_MC_ARG(uint64_t *, pDst, 0);
2747 IEM_MC_LOCAL(uint64_t, uSrc);
2748 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2750
2751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2753 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2754 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2755
2756 IEM_MC_PREPARE_FPU_USAGE();
2757 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2758 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2759
2760 IEM_MC_ADVANCE_RIP();
2761 IEM_MC_END();
2762 }
2763 return VINF_SUCCESS;
2764}
2765
2766
2767/**
2768 * Common worker for SSE2 instructions on the form:
2769 * pxxxx xmm1, xmm2/mem128
2770 *
2771 * The 2nd operand is the second half of a register, which in the memory case
2772 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2773 * where it may read the full 128 bits or only the upper 64 bits.
2774 *
2775 * Exceptions type 4.
2776 */
2777FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2778{
2779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2781 {
2782 /*
2783 * Register, register.
2784 */
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_BEGIN(2, 0);
2787 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2788 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2790 IEM_MC_PREPARE_SSE_USAGE();
2791 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2792 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2793 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2794 IEM_MC_ADVANCE_RIP();
2795 IEM_MC_END();
2796 }
2797 else
2798 {
2799 /*
2800 * Register, memory.
2801 */
2802 IEM_MC_BEGIN(2, 2);
2803 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2804 IEM_MC_LOCAL(RTUINT128U, uSrc);
2805 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2807
2808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2810 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2811     IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2812
2813 IEM_MC_PREPARE_SSE_USAGE();
2814 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2815 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2816
2817 IEM_MC_ADVANCE_RIP();
2818 IEM_MC_END();
2819 }
2820 return VINF_SUCCESS;
2821}
2822
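/*
 * Illustrative sketch (not part of IEM): the "HighHigh_To_Full" counterpart
 * for the 64-bit MMX case -- PUNPCKHBW interleaves the high four bytes of
 * each operand.
 */
#if 0
# include <stdint.h>
static uint64_t punpckhbwU64Ref(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & UINT64_C(0xff)) << (i * 16);
        uResult |= ((uSrc >> (32 + i * 8)) & UINT64_C(0xff)) << (i * 16 + 8);
    }
    return uResult;
}
#endif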
2823
2824/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2825FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2826{
2827 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2828 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2829}
2830
2831/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2832FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2833{
2834     IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2835 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2836}
2837/* Opcode 0xf3 0x0f 0x68 - invalid */
2838
2839
2840/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2841FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2842{
2843 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2844 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2845}
2846
2847 /** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2848FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2849{
2850 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2851     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2852 }
2854/* Opcode 0xf3 0x0f 0x69 - invalid */
2855
2856
2857/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2858FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2859{
2860 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2861 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2862}
2863
2864/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
2865FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2866{
2867 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
2868 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2869}
2870/* Opcode 0xf3 0x0f 0x6a - invalid */
2871
2872
2873/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2874FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2875/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2876FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2877/* Opcode 0xf3 0x0f 0x6b - invalid */
2878
2879
2880/* Opcode 0x0f 0x6c - invalid */
2881
2882/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2883FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2884{
2885 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2886 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2887}
2888
2889/* Opcode 0xf3 0x0f 0x6c - invalid */
2890/* Opcode 0xf2 0x0f 0x6c - invalid */
2891
2892
2893/* Opcode 0x0f 0x6d - invalid */
2894
2895/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
2896FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2897{
2898 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
2899 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2900}
2901
2902/* Opcode 0xf3 0x0f 0x6d - invalid */
2903
2904
2905/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2906FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2907{
2908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2909 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2910 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2911 else
2912 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2914 {
2915 /* MMX, greg */
2916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2917 IEM_MC_BEGIN(0, 1);
2918 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2919 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2920 IEM_MC_LOCAL(uint64_t, u64Tmp);
2921 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2922 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2923 else
2924 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2925 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2926 IEM_MC_ADVANCE_RIP();
2927 IEM_MC_END();
2928 }
2929 else
2930 {
2931 /* MMX, [mem] */
2932 IEM_MC_BEGIN(0, 2);
2933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2934 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2935         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2937 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2939 {
2940 IEM_MC_LOCAL(uint64_t, u64Tmp);
2941 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2942 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2943 }
2944 else
2945 {
2946 IEM_MC_LOCAL(uint32_t, u32Tmp);
2947 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2948 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2949 }
2950 IEM_MC_ADVANCE_RIP();
2951 IEM_MC_END();
2952 }
2953 return VINF_SUCCESS;
2954}
2955
2956/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2957FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2961 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2962 else
2963 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2964 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2965 {
2966 /* XMM, greg*/
2967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2968 IEM_MC_BEGIN(0, 1);
2969 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2970 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2972 {
2973 IEM_MC_LOCAL(uint64_t, u64Tmp);
2974 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2975 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2976 }
2977 else
2978 {
2979 IEM_MC_LOCAL(uint32_t, u32Tmp);
2980 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2981 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2982 }
2983 IEM_MC_ADVANCE_RIP();
2984 IEM_MC_END();
2985 }
2986 else
2987 {
2988 /* XMM, [mem] */
2989 IEM_MC_BEGIN(0, 2);
2990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2991 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2992         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2994 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2995 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2996 {
2997 IEM_MC_LOCAL(uint64_t, u64Tmp);
2998 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2999 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3000 }
3001 else
3002 {
3003 IEM_MC_LOCAL(uint32_t, u32Tmp);
3004 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3005 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3006 }
3007 IEM_MC_ADVANCE_RIP();
3008 IEM_MC_END();
3009 }
3010 return VINF_SUCCESS;
3011}
3012
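/*
 * Illustrative sketch (not part of IEM): MOVD/MOVQ into an XMM register
 * zero-extends through bit 127, which is what the
 * IEM_MC_STORE_XREG_U32_ZX_U128 / IEM_MC_STORE_XREG_U64_ZX_U128 operations
 * above express.
 */
#if 0
# include <stdint.h>
# include <string.h>
static void movdToXmmRef(uint8_t abXmm[16], uint32_t u32Src)
{
    memset(abXmm, 0, 16);                      /* bits 32..127 are cleared */
    memcpy(abXmm, &u32Src, sizeof(u32Src));    /* low dword gets the source */
}
#endif
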
3013/* Opcode 0xf3 0x0f 0x6e - invalid */
3014
3015
3016/** Opcode 0x0f 0x6f - movq Pq, Qq */
3017FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3018{
3019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3020 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3021 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3022 {
3023 /*
3024 * Register, register.
3025 */
3026 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3027 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3029 IEM_MC_BEGIN(0, 1);
3030 IEM_MC_LOCAL(uint64_t, u64Tmp);
3031 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3032 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3033 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3034 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3035 IEM_MC_ADVANCE_RIP();
3036 IEM_MC_END();
3037 }
3038 else
3039 {
3040 /*
3041 * Register, memory.
3042 */
3043 IEM_MC_BEGIN(0, 2);
3044 IEM_MC_LOCAL(uint64_t, u64Tmp);
3045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3046
3047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3049 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3050 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3051 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3052 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3053
3054 IEM_MC_ADVANCE_RIP();
3055 IEM_MC_END();
3056 }
3057 return VINF_SUCCESS;
3058}
3059
3060/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3061FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3062{
3063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3064 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3065 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3066 {
3067 /*
3068 * Register, register.
3069 */
3070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3071 IEM_MC_BEGIN(0, 0);
3072 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3073 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3074 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3075 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3076 IEM_MC_ADVANCE_RIP();
3077 IEM_MC_END();
3078 }
3079 else
3080 {
3081 /*
3082 * Register, memory.
3083 */
3084 IEM_MC_BEGIN(0, 2);
3085 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3087
3088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3092 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3093 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3094
3095 IEM_MC_ADVANCE_RIP();
3096 IEM_MC_END();
3097 }
3098 return VINF_SUCCESS;
3099}
3100
3101/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3102FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3103{
3104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3105 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3107 {
3108 /*
3109 * Register, register.
3110 */
3111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3112 IEM_MC_BEGIN(0, 0);
3113 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3115 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3116 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3117 IEM_MC_ADVANCE_RIP();
3118 IEM_MC_END();
3119 }
3120 else
3121 {
3122 /*
3123 * Register, memory.
3124 */
3125 IEM_MC_BEGIN(0, 2);
3126 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3128
3129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3133 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3134 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3135
3136 IEM_MC_ADVANCE_RIP();
3137 IEM_MC_END();
3138 }
3139 return VINF_SUCCESS;
3140}
3141
3142
3143/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3144FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3145{
3146 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3149 {
3150 /*
3151 * Register, register.
3152 */
3153 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3155
3156 IEM_MC_BEGIN(3, 0);
3157 IEM_MC_ARG(uint64_t *, pDst, 0);
3158 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3159 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3160 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3161 IEM_MC_PREPARE_FPU_USAGE();
3162 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3163 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3164 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3165 IEM_MC_ADVANCE_RIP();
3166 IEM_MC_END();
3167 }
3168 else
3169 {
3170 /*
3171 * Register, memory.
3172 */
3173 IEM_MC_BEGIN(3, 2);
3174 IEM_MC_ARG(uint64_t *, pDst, 0);
3175 IEM_MC_LOCAL(uint64_t, uSrc);
3176 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3178
3179         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3180 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3181 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3183 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3184
3185 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3186 IEM_MC_PREPARE_FPU_USAGE();
3187 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3188 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3189
3190 IEM_MC_ADVANCE_RIP();
3191 IEM_MC_END();
3192 }
3193 return VINF_SUCCESS;
3194}
3195
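/*
 * Illustrative sketch (not part of IEM): the word shuffle iemAImpl_pshufw is
 * expected to perform -- result word i selects source word
 * (bEvil >> (2 * i)) & 3.
 */
#if 0
# include <stdint.h>
static uint64_t pshufwRef(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bImm >> (i * 2)) & 3;
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16);
    }
    return uResult;
}
#endif
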
3196/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3197FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3198{
3199 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3201 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3202 {
3203 /*
3204 * Register, register.
3205 */
3206 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3208
3209 IEM_MC_BEGIN(3, 0);
3210 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3211 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3212 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3213 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3214 IEM_MC_PREPARE_SSE_USAGE();
3215 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3216 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3217 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3218 IEM_MC_ADVANCE_RIP();
3219 IEM_MC_END();
3220 }
3221 else
3222 {
3223 /*
3224 * Register, memory.
3225 */
3226 IEM_MC_BEGIN(3, 2);
3227 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3228 IEM_MC_LOCAL(RTUINT128U, uSrc);
3229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3231
3232         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3233 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3234 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3236 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3237
3238 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3239 IEM_MC_PREPARE_SSE_USAGE();
3240 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3241 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3242
3243 IEM_MC_ADVANCE_RIP();
3244 IEM_MC_END();
3245 }
3246 return VINF_SUCCESS;
3247}
3248
3249/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3250FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3251{
3252 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3255 {
3256 /*
3257 * Register, register.
3258 */
3259 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261
3262 IEM_MC_BEGIN(3, 0);
3263 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3264 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3265 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3266 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3267 IEM_MC_PREPARE_SSE_USAGE();
3268 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3269 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3270 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3271 IEM_MC_ADVANCE_RIP();
3272 IEM_MC_END();
3273 }
3274 else
3275 {
3276 /*
3277 * Register, memory.
3278 */
3279 IEM_MC_BEGIN(3, 2);
3280 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3281 IEM_MC_LOCAL(RTUINT128U, uSrc);
3282 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3284
3285         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3286 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3287 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3290
3291 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3292 IEM_MC_PREPARE_SSE_USAGE();
3293 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3294 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3295
3296 IEM_MC_ADVANCE_RIP();
3297 IEM_MC_END();
3298 }
3299 return VINF_SUCCESS;
3300}
3301
3302/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3303FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3304{
3305 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3307 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3308 {
3309 /*
3310 * Register, register.
3311 */
3312 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3314
3315 IEM_MC_BEGIN(3, 0);
3316 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3317 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3318 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3319 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3320 IEM_MC_PREPARE_SSE_USAGE();
3321 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3323 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /*
3330 * Register, memory.
3331 */
3332 IEM_MC_BEGIN(3, 2);
3333 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3334 IEM_MC_LOCAL(RTUINT128U, uSrc);
3335 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3337
3338         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3339 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3340 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3342 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3343
3344 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3345 IEM_MC_PREPARE_SSE_USAGE();
3346 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3347 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3348
3349 IEM_MC_ADVANCE_RIP();
3350 IEM_MC_END();
3351 }
3352 return VINF_SUCCESS;
3353}
3354
3355
3356/** Opcode 0x0f 0x71 11/2. */
3357FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3358
3359/** Opcode 0x66 0x0f 0x71 11/2. */
3360FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3361
3362/** Opcode 0x0f 0x71 11/4. */
3363FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3364
3365/** Opcode 0x66 0x0f 0x71 11/4. */
3366FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3367
3368/** Opcode 0x0f 0x71 11/6. */
3369FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3370
3371/** Opcode 0x66 0x0f 0x71 11/6. */
3372FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3373
3374
3375/**
3376 * Group 12 jump table for register variant.
3377 */
3378IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3379{
3380 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3381 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3382 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3383 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3384 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3385 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3386 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3387 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3388};
3389AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3390
3391
3392/** Opcode 0x0f 0x71. */
3393FNIEMOP_DEF(iemOp_Grp12)
3394{
3395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3397 /* register, register */
3398 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3399 + pVCpu->iem.s.idxPrefix], bRm);
3400 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3401}
3402
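/*
 * Illustrative sketch (not part of IEM): the group dispatch above indexes an
 * 8x4 table -- one row per ModRM.reg value, one column per prefix.  The
 * column order (none, 0x66, 0xf3, 0xf2 for idxPrefix 0..3) is an assumption
 * inferred from the table layout.
 */
#if 0
static unsigned grpRegRegIndex(unsigned char bRm, unsigned idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* row * 4 + column */
}
/* e.g. 66 0F 71 /2 (psrlw Ux,Ib): bRm=0xd2, idxPrefix=1 -> slot 9,
   the iemOp_Grp12_psrlw_Ux_Ib entry. */
#endif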
3403
3404/** Opcode 0x0f 0x72 11/2. */
3405FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3406
3407/** Opcode 0x66 0x0f 0x72 11/2. */
3408FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3409
3410/** Opcode 0x0f 0x72 11/4. */
3411FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3412
3413/** Opcode 0x66 0x0f 0x72 11/4. */
3414FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3415
3416/** Opcode 0x0f 0x72 11/6. */
3417FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3418
3419/** Opcode 0x66 0x0f 0x72 11/6. */
3420FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3421
3422
3423/**
3424 * Group 13 jump table for register variant.
3425 */
3426IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3427{
3428 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3429 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3430 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3431 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3432 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3433 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3434 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3435 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3436};
3437AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3438
3439/** Opcode 0x0f 0x72. */
3440FNIEMOP_DEF(iemOp_Grp13)
3441{
3442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3444 /* register, register */
3445 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3446 + pVCpu->iem.s.idxPrefix], bRm);
3447 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3448}
3449
3450
3451/** Opcode 0x0f 0x73 11/2. */
3452FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3453
3454/** Opcode 0x66 0x0f 0x73 11/2. */
3455FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3456
3457/** Opcode 0x66 0x0f 0x73 11/3. */
3458FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3459
3460/** Opcode 0x0f 0x73 11/6. */
3461FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3462
3463/** Opcode 0x66 0x0f 0x73 11/6. */
3464FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3465
3466/** Opcode 0x66 0x0f 0x73 11/7. */
3467FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3468
3469/**
3470 * Group 14 jump table for register variant.
3471 */
3472IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3473{
3474 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3475 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3476 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3477 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3478 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3479 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3480 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3481 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3482};
3483AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3484
3485
3486/** Opcode 0x0f 0x73. */
3487FNIEMOP_DEF(iemOp_Grp14)
3488{
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3491 /* register, register */
3492 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3493 + pVCpu->iem.s.idxPrefix], bRm);
3494 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3495}
3496
3497
3498/**
3499 * Common worker for MMX instructions on the form:
3500 * pxxx mm1, mm2/mem64
3501 */
3502FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3503{
3504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3505 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3506 {
3507 /*
3508 * Register, register.
3509 */
3510 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3511 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3513 IEM_MC_BEGIN(2, 0);
3514 IEM_MC_ARG(uint64_t *, pDst, 0);
3515 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3516 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3517 IEM_MC_PREPARE_FPU_USAGE();
3518 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3519 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3520 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3521 IEM_MC_ADVANCE_RIP();
3522 IEM_MC_END();
3523 }
3524 else
3525 {
3526 /*
3527 * Register, memory.
3528 */
3529 IEM_MC_BEGIN(2, 2);
3530 IEM_MC_ARG(uint64_t *, pDst, 0);
3531 IEM_MC_LOCAL(uint64_t, uSrc);
3532 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3534
3535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3538 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3539
3540 IEM_MC_PREPARE_FPU_USAGE();
3541 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3542 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3543
3544 IEM_MC_ADVANCE_RIP();
3545 IEM_MC_END();
3546 }
3547 return VINF_SUCCESS;
3548}
3549
3550
3551/**
3552 * Common worker for SSE2 instructions of the form:
3553 * pxxx xmm1, xmm2/mem128
3554 *
3555 * Proper alignment of the 128-bit operand is enforced.
3556 * Exceptions type 4. SSE2 cpuid checks.
3557 */
3558FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3559{
3560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3562 {
3563 /*
3564 * Register, register.
3565 */
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3567 IEM_MC_BEGIN(2, 0);
3568 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3569 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_PREPARE_SSE_USAGE();
3572 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3573 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3574 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3575 IEM_MC_ADVANCE_RIP();
3576 IEM_MC_END();
3577 }
3578 else
3579 {
3580 /*
3581 * Register, memory.
3582 */
3583 IEM_MC_BEGIN(2, 2);
3584 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3585 IEM_MC_LOCAL(RTUINT128U, uSrc);
3586 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3588
3589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3591 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
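        /* The _ALIGN_SSE fetch below raises #GP(0) for a misaligned 128-bit
           operand; this is the 'exceptions type 4' alignment behaviour noted
           in the function comment. */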
3592 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3593
3594 IEM_MC_PREPARE_SSE_USAGE();
3595 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3596 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3597
3598 IEM_MC_ADVANCE_RIP();
3599 IEM_MC_END();
3600 }
3601 return VINF_SUCCESS;
3602}
3603
3604
3605/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3606FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3607{
3608 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3609 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3610}
3611
3612/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3613FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3614{
3615     IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3616 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3617}
3618
3619/* Opcode 0xf3 0x0f 0x74 - invalid */
3620/* Opcode 0xf2 0x0f 0x74 - invalid */
3621
3622
3623/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3624FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3625{
3626 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3627 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3628}
3629
3630/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3631FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3632{
3633 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3634 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3635}
3636
3637/* Opcode 0xf3 0x0f 0x75 - invalid */
3638/* Opcode 0xf2 0x0f 0x75 - invalid */
3639
3640
3641/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3642FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3643{
3644 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3645 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3646}
3647
3648/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3649FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3650{
3651     IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3652 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3653}
3654
3655/* Opcode 0xf3 0x0f 0x76 - invalid */
3656/* Opcode 0xf2 0x0f 0x76 - invalid */
3657
3658
3659/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3660FNIEMOP_STUB(iemOp_emms);
3661/* Opcode 0x66 0x0f 0x77 - invalid */
3662/* Opcode 0xf3 0x0f 0x77 - invalid */
3663/* Opcode 0xf2 0x0f 0x77 - invalid */
3664
3665/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3666FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3667/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3668FNIEMOP_STUB(iemOp_AmdGrp17);
3669/* Opcode 0xf3 0x0f 0x78 - invalid */
3670/* Opcode 0xf2 0x0f 0x78 - invalid */
3671
3672/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3673FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3674/* Opcode 0x66 0x0f 0x79 - invalid */
3675/* Opcode 0xf3 0x0f 0x79 - invalid */
3676/* Opcode 0xf2 0x0f 0x79 - invalid */
3677
3678/* Opcode 0x0f 0x7a - invalid */
3679/* Opcode 0x66 0x0f 0x7a - invalid */
3680/* Opcode 0xf3 0x0f 0x7a - invalid */
3681/* Opcode 0xf2 0x0f 0x7a - invalid */
3682
3683/* Opcode 0x0f 0x7b - invalid */
3684/* Opcode 0x66 0x0f 0x7b - invalid */
3685/* Opcode 0xf3 0x0f 0x7b - invalid */
3686/* Opcode 0xf2 0x0f 0x7b - invalid */
3687
3688/* Opcode 0x0f 0x7c - invalid */
3689/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3690FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3691/* Opcode 0xf3 0x0f 0x7c - invalid */
3692/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3693FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3694
3695/* Opcode 0x0f 0x7d - invalid */
3696/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3697FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3698/* Opcode 0xf3 0x0f 0x7d - invalid */
3699/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3700FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3701
3702
3703/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3704FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3705{
3706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3707 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3708 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3709 else
3710 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3711 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3712 {
3713 /* greg, MMX */
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_BEGIN(0, 1);
3716 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3717 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3718 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3719 {
3720 IEM_MC_LOCAL(uint64_t, u64Tmp);
3721 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3722 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3723 }
3724 else
3725 {
3726 IEM_MC_LOCAL(uint32_t, u32Tmp);
3727 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3728 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3729 }
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 }
3733 else
3734 {
3735 /* [mem], MMX */
3736 IEM_MC_BEGIN(0, 2);
3737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3738 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3739         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3742 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3743 {
3744 IEM_MC_LOCAL(uint64_t, u64Tmp);
3745 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3746 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3747 }
3748 else
3749 {
3750 IEM_MC_LOCAL(uint32_t, u32Tmp);
3751 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3752 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3753 }
3754 IEM_MC_ADVANCE_RIP();
3755 IEM_MC_END();
3756 }
3757 return VINF_SUCCESS;
3758}
3759
3760/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3761FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3762{
3763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3764 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3765 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3766 else
3767 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3769 {
3770 /* greg, XMM */
3771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3772 IEM_MC_BEGIN(0, 1);
3773 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3774 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3775 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3776 {
3777 IEM_MC_LOCAL(uint64_t, u64Tmp);
3778 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3779 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3780 }
3781 else
3782 {
3783 IEM_MC_LOCAL(uint32_t, u32Tmp);
3784 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3785 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3786 }
3787 IEM_MC_ADVANCE_RIP();
3788 IEM_MC_END();
3789 }
3790 else
3791 {
3792 /* [mem], XMM */
3793 IEM_MC_BEGIN(0, 2);
3794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3796         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3798 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3800 {
3801 IEM_MC_LOCAL(uint64_t, u64Tmp);
3802 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3803 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3804 }
3805 else
3806 {
3807 IEM_MC_LOCAL(uint32_t, u32Tmp);
3808 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3809 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3810 }
3811 IEM_MC_ADVANCE_RIP();
3812 IEM_MC_END();
3813 }
3814 return VINF_SUCCESS;
3815}
3816
3817/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3818FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3819/* Opcode 0xf2 0x0f 0x7e - invalid */
3820
3821
3822/** Opcode 0x0f 0x7f - movq Qq, Pq */
3823FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3824{
3825 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3827 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3828 {
3829 /*
3830 * Register, register.
3831 */
3832 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3833 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3835 IEM_MC_BEGIN(0, 1);
3836 IEM_MC_LOCAL(uint64_t, u64Tmp);
3837 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3838 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3839 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3840 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3841 IEM_MC_ADVANCE_RIP();
3842 IEM_MC_END();
3843 }
3844 else
3845 {
3846 /*
3847 * Register, memory.
3848 */
3849 IEM_MC_BEGIN(0, 2);
3850 IEM_MC_LOCAL(uint64_t, u64Tmp);
3851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3852
3853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3855 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3856 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3857
3858 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3859 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3860
3861 IEM_MC_ADVANCE_RIP();
3862 IEM_MC_END();
3863 }
3864 return VINF_SUCCESS;
3865}
3866
3867/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3868FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3869{
3870 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3873 {
3874 /*
3875 * Register, register.
3876 */
3877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3878 IEM_MC_BEGIN(0, 0);
3879 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3880 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3881 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3882 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3883 IEM_MC_ADVANCE_RIP();
3884 IEM_MC_END();
3885 }
3886 else
3887 {
3888 /*
3889 * Register, memory.
3890 */
3891 IEM_MC_BEGIN(0, 2);
3892 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3894
3895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3898 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3899
3900 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3901 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3902
3903 IEM_MC_ADVANCE_RIP();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3910FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3911{
3912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3913 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3914 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3915 {
3916 /*
3917 * Register, register.
3918 */
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920 IEM_MC_BEGIN(0, 0);
3921 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3922 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3923 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3924 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3925 IEM_MC_ADVANCE_RIP();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /*
3931 * Register, memory.
3932 */
3933 IEM_MC_BEGIN(0, 2);
3934 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3936
3937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3939 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3941
3942 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
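        /* Unlike movdqa above, movdqu has no alignment requirement, hence the
           plain (non-_ALIGN_SSE) 128-bit store below. */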
3943 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3944
3945 IEM_MC_ADVANCE_RIP();
3946 IEM_MC_END();
3947 }
3948 return VINF_SUCCESS;
3949}
3950
3951/* Opcode 0xf2 0x0f 0x7f - invalid */
3952
3953
3954
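/*
 * The long-form Jcc instructions (0x0f 0x80 through 0x8f) below all follow
 * the same pattern: fetch a signed 16-bit or 32-bit displacement according to
 * the effective operand size (64-bit operand size, the default in long mode,
 * also uses the 32-bit displacement form), then either take the relative jump
 * or simply advance RIP, depending on the EFLAGS condition.
 */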
3955/** Opcode 0x0f 0x80. */
3956FNIEMOP_DEF(iemOp_jo_Jv)
3957{
3958 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3959 IEMOP_HLP_MIN_386();
3960 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3961 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3962 {
3963 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3965
3966 IEM_MC_BEGIN(0, 0);
3967 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3968 IEM_MC_REL_JMP_S16(i16Imm);
3969 } IEM_MC_ELSE() {
3970 IEM_MC_ADVANCE_RIP();
3971 } IEM_MC_ENDIF();
3972 IEM_MC_END();
3973 }
3974 else
3975 {
3976 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3978
3979 IEM_MC_BEGIN(0, 0);
3980 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3981 IEM_MC_REL_JMP_S32(i32Imm);
3982 } IEM_MC_ELSE() {
3983 IEM_MC_ADVANCE_RIP();
3984 } IEM_MC_ENDIF();
3985 IEM_MC_END();
3986 }
3987 return VINF_SUCCESS;
3988}
3989
3990
3991/** Opcode 0x0f 0x81. */
3992FNIEMOP_DEF(iemOp_jno_Jv)
3993{
3994 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3995 IEMOP_HLP_MIN_386();
3996 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3997 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3998 {
3999 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4001
4002 IEM_MC_BEGIN(0, 0);
4003 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4004 IEM_MC_ADVANCE_RIP();
4005 } IEM_MC_ELSE() {
4006 IEM_MC_REL_JMP_S16(i16Imm);
4007 } IEM_MC_ENDIF();
4008 IEM_MC_END();
4009 }
4010 else
4011 {
4012 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4014
4015 IEM_MC_BEGIN(0, 0);
4016 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4017 IEM_MC_ADVANCE_RIP();
4018 } IEM_MC_ELSE() {
4019 IEM_MC_REL_JMP_S32(i32Imm);
4020 } IEM_MC_ENDIF();
4021 IEM_MC_END();
4022 }
4023 return VINF_SUCCESS;
4024}
4025
4026
4027/** Opcode 0x0f 0x82. */
4028FNIEMOP_DEF(iemOp_jc_Jv)
4029{
4030 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4031 IEMOP_HLP_MIN_386();
4032 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4033 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4034 {
4035 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4037
4038 IEM_MC_BEGIN(0, 0);
4039 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4040 IEM_MC_REL_JMP_S16(i16Imm);
4041 } IEM_MC_ELSE() {
4042 IEM_MC_ADVANCE_RIP();
4043 } IEM_MC_ENDIF();
4044 IEM_MC_END();
4045 }
4046 else
4047 {
4048 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4050
4051 IEM_MC_BEGIN(0, 0);
4052 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4053 IEM_MC_REL_JMP_S32(i32Imm);
4054 } IEM_MC_ELSE() {
4055 IEM_MC_ADVANCE_RIP();
4056 } IEM_MC_ENDIF();
4057 IEM_MC_END();
4058 }
4059 return VINF_SUCCESS;
4060}
4061
4062
4063/** Opcode 0x0f 0x83. */
4064FNIEMOP_DEF(iemOp_jnc_Jv)
4065{
4066 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4067 IEMOP_HLP_MIN_386();
4068 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4069 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4070 {
4071 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073
4074 IEM_MC_BEGIN(0, 0);
4075 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4076 IEM_MC_ADVANCE_RIP();
4077 } IEM_MC_ELSE() {
4078 IEM_MC_REL_JMP_S16(i16Imm);
4079 } IEM_MC_ENDIF();
4080 IEM_MC_END();
4081 }
4082 else
4083 {
4084 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4086
4087 IEM_MC_BEGIN(0, 0);
4088 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4089 IEM_MC_ADVANCE_RIP();
4090 } IEM_MC_ELSE() {
4091 IEM_MC_REL_JMP_S32(i32Imm);
4092 } IEM_MC_ENDIF();
4093 IEM_MC_END();
4094 }
4095 return VINF_SUCCESS;
4096}
4097
4098
4099/** Opcode 0x0f 0x84. */
4100FNIEMOP_DEF(iemOp_je_Jv)
4101{
4102 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4103 IEMOP_HLP_MIN_386();
4104 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4105 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4106 {
4107 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4109
4110 IEM_MC_BEGIN(0, 0);
4111 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4112 IEM_MC_REL_JMP_S16(i16Imm);
4113 } IEM_MC_ELSE() {
4114 IEM_MC_ADVANCE_RIP();
4115 } IEM_MC_ENDIF();
4116 IEM_MC_END();
4117 }
4118 else
4119 {
4120 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4122
4123 IEM_MC_BEGIN(0, 0);
4124 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4125 IEM_MC_REL_JMP_S32(i32Imm);
4126 } IEM_MC_ELSE() {
4127 IEM_MC_ADVANCE_RIP();
4128 } IEM_MC_ENDIF();
4129 IEM_MC_END();
4130 }
4131 return VINF_SUCCESS;
4132}
4133
4134
4135/** Opcode 0x0f 0x85. */
4136FNIEMOP_DEF(iemOp_jne_Jv)
4137{
4138 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4139 IEMOP_HLP_MIN_386();
4140 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4141 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4142 {
4143 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4145
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4148 IEM_MC_ADVANCE_RIP();
4149 } IEM_MC_ELSE() {
4150 IEM_MC_REL_JMP_S16(i16Imm);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_END();
4153 }
4154 else
4155 {
4156 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4158
4159 IEM_MC_BEGIN(0, 0);
4160 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4161 IEM_MC_ADVANCE_RIP();
4162 } IEM_MC_ELSE() {
4163 IEM_MC_REL_JMP_S32(i32Imm);
4164 } IEM_MC_ENDIF();
4165 IEM_MC_END();
4166 }
4167 return VINF_SUCCESS;
4168}
4169
4170
4171/** Opcode 0x0f 0x86. */
4172FNIEMOP_DEF(iemOp_jbe_Jv)
4173{
4174 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4175 IEMOP_HLP_MIN_386();
4176 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4177 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4178 {
4179 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181
4182 IEM_MC_BEGIN(0, 0);
4183 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4184 IEM_MC_REL_JMP_S16(i16Imm);
4185 } IEM_MC_ELSE() {
4186 IEM_MC_ADVANCE_RIP();
4187 } IEM_MC_ENDIF();
4188 IEM_MC_END();
4189 }
4190 else
4191 {
4192 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4194
4195 IEM_MC_BEGIN(0, 0);
4196 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4197 IEM_MC_REL_JMP_S32(i32Imm);
4198 } IEM_MC_ELSE() {
4199 IEM_MC_ADVANCE_RIP();
4200 } IEM_MC_ENDIF();
4201 IEM_MC_END();
4202 }
4203 return VINF_SUCCESS;
4204}
4205
4206
4207/** Opcode 0x0f 0x87. */
4208FNIEMOP_DEF(iemOp_jnbe_Jv)
4209{
4210 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4211 IEMOP_HLP_MIN_386();
4212 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4213 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4214 {
4215 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4217
4218 IEM_MC_BEGIN(0, 0);
4219 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4220 IEM_MC_ADVANCE_RIP();
4221 } IEM_MC_ELSE() {
4222 IEM_MC_REL_JMP_S16(i16Imm);
4223 } IEM_MC_ENDIF();
4224 IEM_MC_END();
4225 }
4226 else
4227 {
4228 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230
4231 IEM_MC_BEGIN(0, 0);
4232 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4233 IEM_MC_ADVANCE_RIP();
4234 } IEM_MC_ELSE() {
4235 IEM_MC_REL_JMP_S32(i32Imm);
4236 } IEM_MC_ENDIF();
4237 IEM_MC_END();
4238 }
4239 return VINF_SUCCESS;
4240}
4241
4242
4243/** Opcode 0x0f 0x88. */
4244FNIEMOP_DEF(iemOp_js_Jv)
4245{
4246 IEMOP_MNEMONIC(js_Jv, "js Jv");
4247 IEMOP_HLP_MIN_386();
4248 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4249 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4250 {
4251 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4253
4254 IEM_MC_BEGIN(0, 0);
4255 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4256 IEM_MC_REL_JMP_S16(i16Imm);
4257 } IEM_MC_ELSE() {
4258 IEM_MC_ADVANCE_RIP();
4259 } IEM_MC_ENDIF();
4260 IEM_MC_END();
4261 }
4262 else
4263 {
4264 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4266
4267 IEM_MC_BEGIN(0, 0);
4268 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4269 IEM_MC_REL_JMP_S32(i32Imm);
4270 } IEM_MC_ELSE() {
4271 IEM_MC_ADVANCE_RIP();
4272 } IEM_MC_ENDIF();
4273 IEM_MC_END();
4274 }
4275 return VINF_SUCCESS;
4276}
4277
4278
4279/** Opcode 0x0f 0x89. */
4280FNIEMOP_DEF(iemOp_jns_Jv)
4281{
4282 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4283 IEMOP_HLP_MIN_386();
4284 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4285 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4286 {
4287 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4289
4290 IEM_MC_BEGIN(0, 0);
4291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4292 IEM_MC_ADVANCE_RIP();
4293 } IEM_MC_ELSE() {
4294 IEM_MC_REL_JMP_S16(i16Imm);
4295 } IEM_MC_ENDIF();
4296 IEM_MC_END();
4297 }
4298 else
4299 {
4300 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4302
4303 IEM_MC_BEGIN(0, 0);
4304 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4305 IEM_MC_ADVANCE_RIP();
4306 } IEM_MC_ELSE() {
4307 IEM_MC_REL_JMP_S32(i32Imm);
4308 } IEM_MC_ENDIF();
4309 IEM_MC_END();
4310 }
4311 return VINF_SUCCESS;
4312}
4313
4314
4315/** Opcode 0x0f 0x8a. */
4316FNIEMOP_DEF(iemOp_jp_Jv)
4317{
4318 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4319 IEMOP_HLP_MIN_386();
4320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4321 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4322 {
4323 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4325
4326 IEM_MC_BEGIN(0, 0);
4327 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4328 IEM_MC_REL_JMP_S16(i16Imm);
4329 } IEM_MC_ELSE() {
4330 IEM_MC_ADVANCE_RIP();
4331 } IEM_MC_ENDIF();
4332 IEM_MC_END();
4333 }
4334 else
4335 {
4336 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4338
4339 IEM_MC_BEGIN(0, 0);
4340 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4341 IEM_MC_REL_JMP_S32(i32Imm);
4342 } IEM_MC_ELSE() {
4343 IEM_MC_ADVANCE_RIP();
4344 } IEM_MC_ENDIF();
4345 IEM_MC_END();
4346 }
4347 return VINF_SUCCESS;
4348}
4349
4350
4351/** Opcode 0x0f 0x8b. */
4352FNIEMOP_DEF(iemOp_jnp_Jv)
4353{
4354 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4355 IEMOP_HLP_MIN_386();
4356 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4357 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4358 {
4359 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4361
4362 IEM_MC_BEGIN(0, 0);
4363 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4364 IEM_MC_ADVANCE_RIP();
4365 } IEM_MC_ELSE() {
4366 IEM_MC_REL_JMP_S16(i16Imm);
4367 } IEM_MC_ENDIF();
4368 IEM_MC_END();
4369 }
4370 else
4371 {
4372 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4374
4375 IEM_MC_BEGIN(0, 0);
4376 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4377 IEM_MC_ADVANCE_RIP();
4378 } IEM_MC_ELSE() {
4379 IEM_MC_REL_JMP_S32(i32Imm);
4380 } IEM_MC_ENDIF();
4381 IEM_MC_END();
4382 }
4383 return VINF_SUCCESS;
4384}
4385
4386
4387/** Opcode 0x0f 0x8c. */
4388FNIEMOP_DEF(iemOp_jl_Jv)
4389{
4390 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4391 IEMOP_HLP_MIN_386();
4392 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4393 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4394 {
4395 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4397
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4400 IEM_MC_REL_JMP_S16(i16Imm);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_ADVANCE_RIP();
4403 } IEM_MC_ENDIF();
4404 IEM_MC_END();
4405 }
4406 else
4407 {
4408 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4410
4411 IEM_MC_BEGIN(0, 0);
4412 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4413 IEM_MC_REL_JMP_S32(i32Imm);
4414 } IEM_MC_ELSE() {
4415 IEM_MC_ADVANCE_RIP();
4416 } IEM_MC_ENDIF();
4417 IEM_MC_END();
4418 }
4419 return VINF_SUCCESS;
4420}
4421
4422
4423/** Opcode 0x0f 0x8d. */
4424FNIEMOP_DEF(iemOp_jnl_Jv)
4425{
4426 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4427 IEMOP_HLP_MIN_386();
4428 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4429 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4430 {
4431 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4433
4434 IEM_MC_BEGIN(0, 0);
4435 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4436 IEM_MC_ADVANCE_RIP();
4437 } IEM_MC_ELSE() {
4438 IEM_MC_REL_JMP_S16(i16Imm);
4439 } IEM_MC_ENDIF();
4440 IEM_MC_END();
4441 }
4442 else
4443 {
4444 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4446
4447 IEM_MC_BEGIN(0, 0);
4448 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4449 IEM_MC_ADVANCE_RIP();
4450 } IEM_MC_ELSE() {
4451 IEM_MC_REL_JMP_S32(i32Imm);
4452 } IEM_MC_ENDIF();
4453 IEM_MC_END();
4454 }
4455 return VINF_SUCCESS;
4456}
4457
4458
4459/** Opcode 0x0f 0x8e. */
4460FNIEMOP_DEF(iemOp_jle_Jv)
4461{
4462 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4463 IEMOP_HLP_MIN_386();
4464 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4465 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4466 {
4467 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4469
4470 IEM_MC_BEGIN(0, 0);
4471 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4472 IEM_MC_REL_JMP_S16(i16Imm);
4473 } IEM_MC_ELSE() {
4474 IEM_MC_ADVANCE_RIP();
4475 } IEM_MC_ENDIF();
4476 IEM_MC_END();
4477 }
4478 else
4479 {
4480 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482
4483 IEM_MC_BEGIN(0, 0);
4484 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4485 IEM_MC_REL_JMP_S32(i32Imm);
4486 } IEM_MC_ELSE() {
4487 IEM_MC_ADVANCE_RIP();
4488 } IEM_MC_ENDIF();
4489 IEM_MC_END();
4490 }
4491 return VINF_SUCCESS;
4492}
4493
4494
4495/** Opcode 0x0f 0x8f. */
4496FNIEMOP_DEF(iemOp_jnle_Jv)
4497{
4498 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4499 IEMOP_HLP_MIN_386();
4500 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4501 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4502 {
4503 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4505
4506 IEM_MC_BEGIN(0, 0);
4507 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4508 IEM_MC_ADVANCE_RIP();
4509 } IEM_MC_ELSE() {
4510 IEM_MC_REL_JMP_S16(i16Imm);
4511 } IEM_MC_ENDIF();
4512 IEM_MC_END();
4513 }
4514 else
4515 {
4516 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4518
4519 IEM_MC_BEGIN(0, 0);
4520 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4521 IEM_MC_ADVANCE_RIP();
4522 } IEM_MC_ELSE() {
4523 IEM_MC_REL_JMP_S32(i32Imm);
4524 } IEM_MC_ENDIF();
4525 IEM_MC_END();
4526 }
4527 return VINF_SUCCESS;
4528}
4529
4530
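/*
 * The SETcc instructions (0x0f 0x90 through 0x9f) below likewise share a
 * single pattern: store 1 in the byte-sized register or memory operand when
 * the EFLAGS condition holds and 0 otherwise; the ModR/M reg field is not
 * used (see the encoding test @todo notes).
 */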
4531/** Opcode 0x0f 0x90. */
4532FNIEMOP_DEF(iemOp_seto_Eb)
4533{
4534 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4535 IEMOP_HLP_MIN_386();
4536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4537
4538 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4539 * any way. AMD says it's "unused", whatever that means. We're
4540  *          ignoring it for now. */
4541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4542 {
4543 /* register target */
4544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4545 IEM_MC_BEGIN(0, 0);
4546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4547 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4550 } IEM_MC_ENDIF();
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 }
4554 else
4555 {
4556 /* memory target */
4557 IEM_MC_BEGIN(0, 1);
4558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4563 } IEM_MC_ELSE() {
4564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4565 } IEM_MC_ENDIF();
4566 IEM_MC_ADVANCE_RIP();
4567 IEM_MC_END();
4568 }
4569 return VINF_SUCCESS;
4570}
4571
4572
4573/** Opcode 0x0f 0x91. */
4574FNIEMOP_DEF(iemOp_setno_Eb)
4575{
4576 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4577 IEMOP_HLP_MIN_386();
4578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4579
4580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4581 * any way. AMD says it's "unused", whatever that means. We're
4582  *          ignoring it for now. */
4583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4584 {
4585 /* register target */
4586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4589 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4590 } IEM_MC_ELSE() {
4591 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 else
4597 {
4598 /* memory target */
4599 IEM_MC_BEGIN(0, 1);
4600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4605 } IEM_MC_ELSE() {
4606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4607 } IEM_MC_ENDIF();
4608 IEM_MC_ADVANCE_RIP();
4609 IEM_MC_END();
4610 }
4611 return VINF_SUCCESS;
4612}
4613
4614
4615/** Opcode 0x0f 0x92. */
4616FNIEMOP_DEF(iemOp_setc_Eb)
4617{
4618 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4619 IEMOP_HLP_MIN_386();
4620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4621
4622 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4623 * any way. AMD says it's "unused", whatever that means. We're
4624  *          ignoring it for now. */
4625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4626 {
4627 /* register target */
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_BEGIN(0, 0);
4630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4631 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4632 } IEM_MC_ELSE() {
4633 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4634 } IEM_MC_ENDIF();
4635 IEM_MC_ADVANCE_RIP();
4636 IEM_MC_END();
4637 }
4638 else
4639 {
4640 /* memory target */
4641 IEM_MC_BEGIN(0, 1);
4642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4646 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4647 } IEM_MC_ELSE() {
4648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4649 } IEM_MC_ENDIF();
4650 IEM_MC_ADVANCE_RIP();
4651 IEM_MC_END();
4652 }
4653 return VINF_SUCCESS;
4654}
4655
4656
4657/** Opcode 0x0f 0x93. */
4658FNIEMOP_DEF(iemOp_setnc_Eb)
4659{
4660 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4661 IEMOP_HLP_MIN_386();
4662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4663
4664 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4665 * any way. AMD says it's "unused", whatever that means. We're
4666  *          ignoring it for now. */
4667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4668 {
4669 /* register target */
4670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4671 IEM_MC_BEGIN(0, 0);
4672 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4673 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4674 } IEM_MC_ELSE() {
4675 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4676 } IEM_MC_ENDIF();
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 /* memory target */
4683 IEM_MC_BEGIN(0, 1);
4684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4688 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4689 } IEM_MC_ELSE() {
4690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4691 } IEM_MC_ENDIF();
4692 IEM_MC_ADVANCE_RIP();
4693 IEM_MC_END();
4694 }
4695 return VINF_SUCCESS;
4696}
4697
4698
4699/** Opcode 0x0f 0x94. */
4700FNIEMOP_DEF(iemOp_sete_Eb)
4701{
4702 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4703 IEMOP_HLP_MIN_386();
4704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4705
4706 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4707 * any way. AMD says it's "unused", whatever that means. We're
4708  *          ignoring it for now. */
4709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4710 {
4711 /* register target */
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4713 IEM_MC_BEGIN(0, 0);
4714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4715 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4718 } IEM_MC_ENDIF();
4719 IEM_MC_ADVANCE_RIP();
4720 IEM_MC_END();
4721 }
4722 else
4723 {
4724 /* memory target */
4725 IEM_MC_BEGIN(0, 1);
4726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4729 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4730 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4731 } IEM_MC_ELSE() {
4732 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4733 } IEM_MC_ENDIF();
4734 IEM_MC_ADVANCE_RIP();
4735 IEM_MC_END();
4736 }
4737 return VINF_SUCCESS;
4738}
4739
4740
4741/** Opcode 0x0f 0x95. */
4742FNIEMOP_DEF(iemOp_setne_Eb)
4743{
4744 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4745 IEMOP_HLP_MIN_386();
4746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4747
4748 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4749 * any way. AMD says it's "unused", whatever that means. We're
4750  *          ignoring it for now. */
4751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4752 {
4753 /* register target */
4754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4755 IEM_MC_BEGIN(0, 0);
4756 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4757 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4758 } IEM_MC_ELSE() {
4759 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4760 } IEM_MC_ENDIF();
4761 IEM_MC_ADVANCE_RIP();
4762 IEM_MC_END();
4763 }
4764 else
4765 {
4766 /* memory target */
4767 IEM_MC_BEGIN(0, 1);
4768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4772 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4773 } IEM_MC_ELSE() {
4774 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4775 } IEM_MC_ENDIF();
4776 IEM_MC_ADVANCE_RIP();
4777 IEM_MC_END();
4778 }
4779 return VINF_SUCCESS;
4780}
4781
4782
4783/** Opcode 0x0f 0x96. */
4784FNIEMOP_DEF(iemOp_setbe_Eb)
4785{
4786 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4787 IEMOP_HLP_MIN_386();
4788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4789
4790 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4791 * any way. AMD says it's "unused", whatever that means. We're
4792  *          ignoring it for now. */
4793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4794 {
4795 /* register target */
4796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4797 IEM_MC_BEGIN(0, 0);
4798 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4799 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4800 } IEM_MC_ELSE() {
4801 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4802 } IEM_MC_ENDIF();
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 }
4806 else
4807 {
4808 /* memory target */
4809 IEM_MC_BEGIN(0, 1);
4810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4813 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4814 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4815 } IEM_MC_ELSE() {
4816 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4817 } IEM_MC_ENDIF();
4818 IEM_MC_ADVANCE_RIP();
4819 IEM_MC_END();
4820 }
4821 return VINF_SUCCESS;
4822}
4823
4824
4825/** Opcode 0x0f 0x97. */
4826FNIEMOP_DEF(iemOp_setnbe_Eb)
4827{
4828 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4829 IEMOP_HLP_MIN_386();
4830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4831
4832 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4833 * any way. AMD says it's "unused", whatever that means. We're
4834  *          ignoring it for now. */
4835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4836 {
4837 /* register target */
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839 IEM_MC_BEGIN(0, 0);
4840 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4841 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4842 } IEM_MC_ELSE() {
4843 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4844 } IEM_MC_ENDIF();
4845 IEM_MC_ADVANCE_RIP();
4846 IEM_MC_END();
4847 }
4848 else
4849 {
4850 /* memory target */
4851 IEM_MC_BEGIN(0, 1);
4852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4856 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4857 } IEM_MC_ELSE() {
4858 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4859 } IEM_MC_ENDIF();
4860 IEM_MC_ADVANCE_RIP();
4861 IEM_MC_END();
4862 }
4863 return VINF_SUCCESS;
4864}
4865
4866
4867/** Opcode 0x0f 0x98. */
4868FNIEMOP_DEF(iemOp_sets_Eb)
4869{
4870 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4871 IEMOP_HLP_MIN_386();
4872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4873
4874 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4875 * any way. AMD says it's "unused", whatever that means. We're
4876  *          ignoring it for now. */
4877 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4878 {
4879 /* register target */
4880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4881 IEM_MC_BEGIN(0, 0);
4882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4883 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4884 } IEM_MC_ELSE() {
4885 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4886 } IEM_MC_ENDIF();
4887 IEM_MC_ADVANCE_RIP();
4888 IEM_MC_END();
4889 }
4890 else
4891 {
4892 /* memory target */
4893 IEM_MC_BEGIN(0, 1);
4894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4897 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4898 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4899 } IEM_MC_ELSE() {
4900 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4901 } IEM_MC_ENDIF();
4902 IEM_MC_ADVANCE_RIP();
4903 IEM_MC_END();
4904 }
4905 return VINF_SUCCESS;
4906}
4907
4908
4909/** Opcode 0x0f 0x99. */
4910FNIEMOP_DEF(iemOp_setns_Eb)
4911{
4912 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4913 IEMOP_HLP_MIN_386();
4914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4915
4916 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4917 * any way. AMD says it's "unused", whatever that means. We're
4918  *          ignoring it for now. */
4919 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4920 {
4921 /* register target */
4922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4923 IEM_MC_BEGIN(0, 0);
4924 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4925 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4926 } IEM_MC_ELSE() {
4927 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4928 } IEM_MC_ENDIF();
4929 IEM_MC_ADVANCE_RIP();
4930 IEM_MC_END();
4931 }
4932 else
4933 {
4934 /* memory target */
4935 IEM_MC_BEGIN(0, 1);
4936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4940 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4941 } IEM_MC_ELSE() {
4942 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4943 } IEM_MC_ENDIF();
4944 IEM_MC_ADVANCE_RIP();
4945 IEM_MC_END();
4946 }
4947 return VINF_SUCCESS;
4948}
4949
4950
4951/** Opcode 0x0f 0x9a. */
4952FNIEMOP_DEF(iemOp_setp_Eb)
4953{
4954 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4955 IEMOP_HLP_MIN_386();
4956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4957
4958 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4959 * any way. AMD says it's "unused", whatever that means. We're
4960  *          ignoring it for now. */
4961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4962 {
4963 /* register target */
4964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4965 IEM_MC_BEGIN(0, 0);
4966 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4967 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4968 } IEM_MC_ELSE() {
4969 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4970 } IEM_MC_ENDIF();
4971 IEM_MC_ADVANCE_RIP();
4972 IEM_MC_END();
4973 }
4974 else
4975 {
4976 /* memory target */
4977 IEM_MC_BEGIN(0, 1);
4978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4981 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4982 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4983 } IEM_MC_ELSE() {
4984 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4985 } IEM_MC_ENDIF();
4986 IEM_MC_ADVANCE_RIP();
4987 IEM_MC_END();
4988 }
4989 return VINF_SUCCESS;
4990}
4991
4992
4993/** Opcode 0x0f 0x9b. */
4994FNIEMOP_DEF(iemOp_setnp_Eb)
4995{
4996 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4997 IEMOP_HLP_MIN_386();
4998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4999
5000 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5001 * any way. AMD says it's "unused", whatever that means. We're
5002  *          ignoring it for now. */
5003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5004 {
5005 /* register target */
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5007 IEM_MC_BEGIN(0, 0);
5008 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5009 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5010 } IEM_MC_ELSE() {
5011 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5012 } IEM_MC_ENDIF();
5013 IEM_MC_ADVANCE_RIP();
5014 IEM_MC_END();
5015 }
5016 else
5017 {
5018 /* memory target */
5019 IEM_MC_BEGIN(0, 1);
5020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5024 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5025 } IEM_MC_ELSE() {
5026 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5027 } IEM_MC_ENDIF();
5028 IEM_MC_ADVANCE_RIP();
5029 IEM_MC_END();
5030 }
5031 return VINF_SUCCESS;
5032}
5033
5034
5035/** Opcode 0x0f 0x9c. */
5036FNIEMOP_DEF(iemOp_setl_Eb)
5037{
5038 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5039 IEMOP_HLP_MIN_386();
5040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5041
5042 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5043 * any way. AMD says it's "unused", whatever that means. We're
5044  *          ignoring it for now. */
5045 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5046 {
5047 /* register target */
5048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5049 IEM_MC_BEGIN(0, 0);
5050 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5051 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5052 } IEM_MC_ELSE() {
5053 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5054 } IEM_MC_ENDIF();
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 }
5058 else
5059 {
5060 /* memory target */
5061 IEM_MC_BEGIN(0, 1);
5062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5066 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5067 } IEM_MC_ELSE() {
5068 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5069 } IEM_MC_ENDIF();
5070 IEM_MC_ADVANCE_RIP();
5071 IEM_MC_END();
5072 }
5073 return VINF_SUCCESS;
5074}
5075
5076
5077/** Opcode 0x0f 0x9d. */
5078FNIEMOP_DEF(iemOp_setnl_Eb)
5079{
5080 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5081 IEMOP_HLP_MIN_386();
5082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5083
5084 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5085 * any way. AMD says it's "unused", whatever that means. We're
5086  *          ignoring it for now. */
5087 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5088 {
5089 /* register target */
5090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5091 IEM_MC_BEGIN(0, 0);
5092 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5093 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5094 } IEM_MC_ELSE() {
5095 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5096 } IEM_MC_ENDIF();
5097 IEM_MC_ADVANCE_RIP();
5098 IEM_MC_END();
5099 }
5100 else
5101 {
5102 /* memory target */
5103 IEM_MC_BEGIN(0, 1);
5104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5107 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5108 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5109 } IEM_MC_ELSE() {
5110 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5111 } IEM_MC_ENDIF();
5112 IEM_MC_ADVANCE_RIP();
5113 IEM_MC_END();
5114 }
5115 return VINF_SUCCESS;
5116}
5117
5118
5119/** Opcode 0x0f 0x9e. */
5120FNIEMOP_DEF(iemOp_setle_Eb)
5121{
5122 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5123 IEMOP_HLP_MIN_386();
5124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5125
5126 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5127 * any way. AMD says it's "unused", whatever that means. We're
5128  *          ignoring it for now. */
5129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5130 {
5131 /* register target */
5132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5133 IEM_MC_BEGIN(0, 0);
5134 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5135 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5136 } IEM_MC_ELSE() {
5137 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5138 } IEM_MC_ENDIF();
5139 IEM_MC_ADVANCE_RIP();
5140 IEM_MC_END();
5141 }
5142 else
5143 {
5144 /* memory target */
5145 IEM_MC_BEGIN(0, 1);
5146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5150 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5151 } IEM_MC_ELSE() {
5152 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5153 } IEM_MC_ENDIF();
5154 IEM_MC_ADVANCE_RIP();
5155 IEM_MC_END();
5156 }
5157 return VINF_SUCCESS;
5158}
5159
5160
5161/** Opcode 0x0f 0x9f. */
5162FNIEMOP_DEF(iemOp_setnle_Eb)
5163{
5164 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5165 IEMOP_HLP_MIN_386();
5166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5167
5168 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5169 * any way. AMD says it's "unused", whatever that means. We're
5170  *          ignoring it for now. */
5171 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5172 {
5173 /* register target */
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175 IEM_MC_BEGIN(0, 0);
5176 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5177 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5178 } IEM_MC_ELSE() {
5179 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5180 } IEM_MC_ENDIF();
5181 IEM_MC_ADVANCE_RIP();
5182 IEM_MC_END();
5183 }
5184 else
5185 {
5186 /* memory target */
5187 IEM_MC_BEGIN(0, 1);
5188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5192 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5193 } IEM_MC_ELSE() {
5194 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5195 } IEM_MC_ENDIF();
5196 IEM_MC_ADVANCE_RIP();
5197 IEM_MC_END();
5198 }
5199 return VINF_SUCCESS;
5200}
5201
5202
5203/**
5204 * Common 'push segment-register' helper.
5205 */
5206FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5207{
5208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5209     Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5210 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5211
5212 switch (pVCpu->iem.s.enmEffOpSize)
5213 {
5214 case IEMMODE_16BIT:
5215 IEM_MC_BEGIN(0, 1);
5216 IEM_MC_LOCAL(uint16_t, u16Value);
5217 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5218 IEM_MC_PUSH_U16(u16Value);
5219 IEM_MC_ADVANCE_RIP();
5220 IEM_MC_END();
5221 break;
5222
5223 case IEMMODE_32BIT:
5224 IEM_MC_BEGIN(0, 1);
5225 IEM_MC_LOCAL(uint32_t, u32Value);
5226 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
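            /* Note: IEM_MC_PUSH_U32_SREG is used rather than a plain 32-bit
               push because real CPUs may write only the low 16 bits of the
               stack slot when a segment register is pushed with a 32-bit
               operand size. */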
5227 IEM_MC_PUSH_U32_SREG(u32Value);
5228 IEM_MC_ADVANCE_RIP();
5229 IEM_MC_END();
5230 break;
5231
5232 case IEMMODE_64BIT:
5233 IEM_MC_BEGIN(0, 1);
5234 IEM_MC_LOCAL(uint64_t, u64Value);
5235 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5236 IEM_MC_PUSH_U64(u64Value);
5237 IEM_MC_ADVANCE_RIP();
5238 IEM_MC_END();
5239 break;
5240 }
5241
5242 return VINF_SUCCESS;
5243}
5244
5245
5246/** Opcode 0x0f 0xa0. */
5247FNIEMOP_DEF(iemOp_push_fs)
5248{
5249 IEMOP_MNEMONIC(push_fs, "push fs");
5250 IEMOP_HLP_MIN_386();
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5253}
5254
5255
5256/** Opcode 0x0f 0xa1. */
5257FNIEMOP_DEF(iemOp_pop_fs)
5258{
5259 IEMOP_MNEMONIC(pop_fs, "pop fs");
5260 IEMOP_HLP_MIN_386();
5261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5262 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5263}
5264
5265
5266/** Opcode 0x0f 0xa2. */
5267FNIEMOP_DEF(iemOp_cpuid)
5268{
5269 IEMOP_MNEMONIC(cpuid, "cpuid");
5270     IEMOP_HLP_MIN_486(); /* not all 486 models have CPUID. */
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5273}
5274
5275
5276/**
5277 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5278 * iemOp_bts_Ev_Gv.
5279 */
5280FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5281{
5282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5283 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5284
5285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5286 {
5287 /* register destination. */
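        /* For the register form the bit offset wraps around: the AND in each
           case below masks it down to the operand width (modulo 16, 32 or 64). */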
5288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5289 switch (pVCpu->iem.s.enmEffOpSize)
5290 {
5291 case IEMMODE_16BIT:
5292 IEM_MC_BEGIN(3, 0);
5293 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5294 IEM_MC_ARG(uint16_t, u16Src, 1);
5295 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5296
5297 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5298 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5299 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5300 IEM_MC_REF_EFLAGS(pEFlags);
5301 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5302
5303 IEM_MC_ADVANCE_RIP();
5304 IEM_MC_END();
5305 return VINF_SUCCESS;
5306
5307 case IEMMODE_32BIT:
5308 IEM_MC_BEGIN(3, 0);
5309 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5310 IEM_MC_ARG(uint32_t, u32Src, 1);
5311 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5312
5313 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5314 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5315 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5316 IEM_MC_REF_EFLAGS(pEFlags);
5317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5318
5319 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 return VINF_SUCCESS;
5323
5324 case IEMMODE_64BIT:
5325 IEM_MC_BEGIN(3, 0);
5326 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5327 IEM_MC_ARG(uint64_t, u64Src, 1);
5328 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5329
5330 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5331 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5332 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5333 IEM_MC_REF_EFLAGS(pEFlags);
5334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5335
5336 IEM_MC_ADVANCE_RIP();
5337 IEM_MC_END();
5338 return VINF_SUCCESS;
5339
5340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5341 }
5342 }
5343 else
5344 {
5345 /* memory destination. */
5346
5347 uint32_t fAccess;
5348 if (pImpl->pfnLockedU16)
5349 fAccess = IEM_ACCESS_DATA_RW;
5350 else /* BT */
5351 fAccess = IEM_ACCESS_DATA_R;
5352
5353 /** @todo test negative bit offsets! */
5354 switch (pVCpu->iem.s.enmEffOpSize)
5355 {
5356 case IEMMODE_16BIT:
5357 IEM_MC_BEGIN(3, 2);
5358 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5359 IEM_MC_ARG(uint16_t, u16Src, 1);
5360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5362 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5363
5364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5365 if (pImpl->pfnLockedU16)
5366 IEMOP_HLP_DONE_DECODING();
5367 else
5368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5369 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
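                /* Split the signed bit offset: the arithmetic shift right by 4
                   followed by the shift left by 1 yields a signed, word-granular
                   byte adjustment for the effective address, while the AND keeps
                   the bit number 0..15 within that word. The 32-bit and 64-bit
                   cases below do the same at dword and qword granularity. */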
5370 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5371 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5372 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5373 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5374 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5375 IEM_MC_FETCH_EFLAGS(EFlags);
5376
5377 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5378 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5380 else
5381 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5382 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5383
5384 IEM_MC_COMMIT_EFLAGS(EFlags);
5385 IEM_MC_ADVANCE_RIP();
5386 IEM_MC_END();
5387 return VINF_SUCCESS;
5388
5389 case IEMMODE_32BIT:
5390 IEM_MC_BEGIN(3, 2);
5391 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5392 IEM_MC_ARG(uint32_t, u32Src, 1);
5393 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5395 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5396
5397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5398 if (pImpl->pfnLockedU16)
5399 IEMOP_HLP_DONE_DECODING();
5400 else
5401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5402 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5403 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5404 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5405 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5406 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5407 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5408 IEM_MC_FETCH_EFLAGS(EFlags);
5409
5410 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5412 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5413 else
5414 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5415 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5416
5417 IEM_MC_COMMIT_EFLAGS(EFlags);
5418 IEM_MC_ADVANCE_RIP();
5419 IEM_MC_END();
5420 return VINF_SUCCESS;
5421
5422 case IEMMODE_64BIT:
5423 IEM_MC_BEGIN(3, 2);
5424 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5425 IEM_MC_ARG(uint64_t, u64Src, 1);
5426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5428 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5429
5430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5431 if (pImpl->pfnLockedU16)
5432 IEMOP_HLP_DONE_DECODING();
5433 else
5434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5435 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5436 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5437 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5438 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5439 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5440 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5441 IEM_MC_FETCH_EFLAGS(EFlags);
5442
5443 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5444 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5445 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5446 else
5447 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5448 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5449
5450 IEM_MC_COMMIT_EFLAGS(EFlags);
5451 IEM_MC_ADVANCE_RIP();
5452 IEM_MC_END();
5453 return VINF_SUCCESS;
5454
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457 }
5458}
5459
5460
5461/** Opcode 0x0f 0xa3. */
5462FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5463{
5464 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5465 IEMOP_HLP_MIN_386();
5466 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5467}
5468
5469
5470/**
5471 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5472 */
5473FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5474{
5475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5476 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5477
5478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5479 {
5480 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482
5483 switch (pVCpu->iem.s.enmEffOpSize)
5484 {
5485 case IEMMODE_16BIT:
5486 IEM_MC_BEGIN(4, 0);
5487 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5488 IEM_MC_ARG(uint16_t, u16Src, 1);
5489 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5490 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5491
5492 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5493 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5494 IEM_MC_REF_EFLAGS(pEFlags);
5495 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5496
5497 IEM_MC_ADVANCE_RIP();
5498 IEM_MC_END();
5499 return VINF_SUCCESS;
5500
5501 case IEMMODE_32BIT:
5502 IEM_MC_BEGIN(4, 0);
5503 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5504 IEM_MC_ARG(uint32_t, u32Src, 1);
5505 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5506 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5507
5508 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5509 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5510 IEM_MC_REF_EFLAGS(pEFlags);
5511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5512
5513 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5514 IEM_MC_ADVANCE_RIP();
5515 IEM_MC_END();
5516 return VINF_SUCCESS;
5517
5518 case IEMMODE_64BIT:
5519 IEM_MC_BEGIN(4, 0);
5520 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5521 IEM_MC_ARG(uint64_t, u64Src, 1);
5522 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5523 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5524
5525 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5526 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5527 IEM_MC_REF_EFLAGS(pEFlags);
5528 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5529
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 return VINF_SUCCESS;
5533
5534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5535 }
5536 }
5537 else
5538 {
5539 switch (pVCpu->iem.s.enmEffOpSize)
5540 {
5541 case IEMMODE_16BIT:
5542 IEM_MC_BEGIN(4, 2);
5543 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5544 IEM_MC_ARG(uint16_t, u16Src, 1);
5545 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5546 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5548
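                /* Note: the third argument (cbImm=1) below tells the
                   effective address calculation that one immediate byte
                   follows the modrm bytes, so that RIP-relative operands in
                   64-bit mode are computed relative to the end of the
                   instruction rather than the end of the displacement. */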
5549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5550 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5551 IEM_MC_ASSIGN(cShiftArg, cShift);
5552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5553 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5554 IEM_MC_FETCH_EFLAGS(EFlags);
5555 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5556 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5557
5558 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5559 IEM_MC_COMMIT_EFLAGS(EFlags);
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 return VINF_SUCCESS;
5563
5564 case IEMMODE_32BIT:
5565 IEM_MC_BEGIN(4, 2);
5566 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5567 IEM_MC_ARG(uint32_t, u32Src, 1);
5568 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5571
5572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5573 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5574 IEM_MC_ASSIGN(cShiftArg, cShift);
5575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5576 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5577 IEM_MC_FETCH_EFLAGS(EFlags);
5578 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5579 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5580
5581 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5582 IEM_MC_COMMIT_EFLAGS(EFlags);
5583 IEM_MC_ADVANCE_RIP();
5584 IEM_MC_END();
5585 return VINF_SUCCESS;
5586
5587 case IEMMODE_64BIT:
5588 IEM_MC_BEGIN(4, 2);
5589 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5590 IEM_MC_ARG(uint64_t, u64Src, 1);
5591 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5592 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5594
5595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5596 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5597 IEM_MC_ASSIGN(cShiftArg, cShift);
5598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5599 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5600 IEM_MC_FETCH_EFLAGS(EFlags);
5601 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5602 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5603
5604 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5605 IEM_MC_COMMIT_EFLAGS(EFlags);
5606 IEM_MC_ADVANCE_RIP();
5607 IEM_MC_END();
5608 return VINF_SUCCESS;
5609
5610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5611 }
5612 }
5613}
5614
5615
5616/**
5617 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5618 */
5619FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5620{
5621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5622 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5623
5624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5625 {
5626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5627
5628 switch (pVCpu->iem.s.enmEffOpSize)
5629 {
5630 case IEMMODE_16BIT:
5631 IEM_MC_BEGIN(4, 0);
5632 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5633 IEM_MC_ARG(uint16_t, u16Src, 1);
5634 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5635 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5636
5637 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5638 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5639 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5640 IEM_MC_REF_EFLAGS(pEFlags);
5641 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5642
5643 IEM_MC_ADVANCE_RIP();
5644 IEM_MC_END();
5645 return VINF_SUCCESS;
5646
5647 case IEMMODE_32BIT:
5648 IEM_MC_BEGIN(4, 0);
5649 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5650 IEM_MC_ARG(uint32_t, u32Src, 1);
5651 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5652 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5653
5654 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5655 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5656 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5657 IEM_MC_REF_EFLAGS(pEFlags);
5658 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5659
5660 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 return VINF_SUCCESS;
5664
5665 case IEMMODE_64BIT:
5666 IEM_MC_BEGIN(4, 0);
5667 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5668 IEM_MC_ARG(uint64_t, u64Src, 1);
5669 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5670 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5671
5672 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5673 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5674 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5675 IEM_MC_REF_EFLAGS(pEFlags);
5676 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5677
5678 IEM_MC_ADVANCE_RIP();
5679 IEM_MC_END();
5680 return VINF_SUCCESS;
5681
5682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5683 }
5684 }
5685 else
5686 {
5687 switch (pVCpu->iem.s.enmEffOpSize)
5688 {
5689 case IEMMODE_16BIT:
5690 IEM_MC_BEGIN(4, 2);
5691 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5692 IEM_MC_ARG(uint16_t, u16Src, 1);
5693 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5694 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5696
5697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5699 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5700 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5701 IEM_MC_FETCH_EFLAGS(EFlags);
5702 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5703 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5704
5705 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5706 IEM_MC_COMMIT_EFLAGS(EFlags);
5707 IEM_MC_ADVANCE_RIP();
5708 IEM_MC_END();
5709 return VINF_SUCCESS;
5710
5711 case IEMMODE_32BIT:
5712 IEM_MC_BEGIN(4, 2);
5713 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5714 IEM_MC_ARG(uint32_t, u32Src, 1);
5715 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5716 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5718
5719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5721 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5722 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5723 IEM_MC_FETCH_EFLAGS(EFlags);
5724 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5725 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5726
5727 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5728 IEM_MC_COMMIT_EFLAGS(EFlags);
5729 IEM_MC_ADVANCE_RIP();
5730 IEM_MC_END();
5731 return VINF_SUCCESS;
5732
5733 case IEMMODE_64BIT:
5734 IEM_MC_BEGIN(4, 2);
5735 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5736 IEM_MC_ARG(uint64_t, u64Src, 1);
5737 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5738 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5740
5741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5743 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5744 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5745 IEM_MC_FETCH_EFLAGS(EFlags);
5746 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5747 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5748
5749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5750 IEM_MC_COMMIT_EFLAGS(EFlags);
5751 IEM_MC_ADVANCE_RIP();
5752 IEM_MC_END();
5753 return VINF_SUCCESS;
5754
5755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5756 }
5757 }
5758}
5759
5760
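/*
 * Rough sketch of the double precision shifts dispatched below, e.g.
 * 'shld ax, bx, 4' computes more or less:
 *      ax = (uint16_t)((ax << 4) | (bx >> 12));
 * with CF receiving the last bit shifted out of the destination; AF and OF
 * are declared undefined by the workers above.
 */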
5761
5762/** Opcode 0x0f 0xa4. */
5763FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5764{
5765 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5766 IEMOP_HLP_MIN_386();
5767 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5768}
5769
5770
5771/** Opcode 0x0f 0xa5. */
5772FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5773{
5774 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5775 IEMOP_HLP_MIN_386();
5776 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5777}
5778
5779
5780/** Opcode 0x0f 0xa8. */
5781FNIEMOP_DEF(iemOp_push_gs)
5782{
5783 IEMOP_MNEMONIC(push_gs, "push gs");
5784 IEMOP_HLP_MIN_386();
5785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5786 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5787}
5788
5789
5790/** Opcode 0x0f 0xa9. */
5791FNIEMOP_DEF(iemOp_pop_gs)
5792{
5793 IEMOP_MNEMONIC(pop_gs, "pop gs");
5794 IEMOP_HLP_MIN_386();
5795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5796 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5797}
5798
5799
5800/** Opcode 0x0f 0xaa. */
5801FNIEMOP_STUB(iemOp_rsm);
5802//IEMOP_HLP_MIN_386();
5803
5804
5805/** Opcode 0x0f 0xab. */
5806FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5807{
5808 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5809 IEMOP_HLP_MIN_386();
5810 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5811}
5812
5813
5814/** Opcode 0x0f 0xac. */
5815FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5816{
5817 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5818 IEMOP_HLP_MIN_386();
5819 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5820}
5821
5822
5823/** Opcode 0x0f 0xad. */
5824FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5825{
5826 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5827 IEMOP_HLP_MIN_386();
5828 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5829}
5830
5831
5832/** Opcode 0x0f 0xae mem/0. */
5833FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5834{
5835 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5836 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5837 return IEMOP_RAISE_INVALID_OPCODE();
5838
5839 IEM_MC_BEGIN(3, 1);
5840 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5841 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5842 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5846 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5847 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5848 IEM_MC_END();
5849 return VINF_SUCCESS;
5850}
5851
5852
5853/** Opcode 0x0f 0xae mem/1. */
5854FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5855{
5856 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5857 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5858 return IEMOP_RAISE_INVALID_OPCODE();
5859
5860 IEM_MC_BEGIN(3, 1);
5861 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5862 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5863 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5867 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5868 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5869 IEM_MC_END();
5870 return VINF_SUCCESS;
5871}
5872
5873
5874/**
5875 * @opmaps grp15
5876 * @opcode !11/2
5877 * @oppfx none
5878 * @opcpuid sse
5879 * @opgroup og_sse_mxcsrsm
5880 * @opxcpttype 5
5881 * @optest op1=0 -> mxcsr=0
5882 * @optest op1=0x2083 -> mxcsr=0x2083
5883 * @optest op1=0xfffffffe -> value.xcpt=0xd
5884 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5885 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5886 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5887 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5888 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5889 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5890 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5891 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5892 */
5893FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5894{
5895 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5896 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5897 return IEMOP_RAISE_INVALID_OPCODE();
5898
5899 IEM_MC_BEGIN(2, 0);
5900 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5901 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5904 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5905 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5906 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5907 IEM_MC_END();
5908 return VINF_SUCCESS;
5909}
5910
5911
5912/**
5913 * @opmaps grp15
5914 * @opcode !11/3
5915 * @oppfx none
5916 * @opcpuid sse
5917 * @opgroup og_sse_mxcsrsm
5918 * @opxcpttype 5
5919 * @optest mxcsr=0 -> op1=0
5920 * @optest mxcsr=0x2083 -> op1=0x2083
5921 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5922 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5923 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5924 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5925 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5926 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5927 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5928 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5929 */
5930FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5931{
5932 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5933 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5934 return IEMOP_RAISE_INVALID_OPCODE();
5935
5936 IEM_MC_BEGIN(2, 0);
5937 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5938 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5941 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5942 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5943 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5944 IEM_MC_END();
5945 return VINF_SUCCESS;
5946}
5947
5948
5949/**
5950 * @opmaps grp15
5951 * @opcode !11/4
5952 * @oppfx none
5953 * @opcpuid xsave
5954 * @opgroup og_system
5955 * @opxcpttype none
5956 */
5957FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5958{
5959 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5960 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5961 return IEMOP_RAISE_INVALID_OPCODE();
5962
5963 IEM_MC_BEGIN(3, 0);
5964 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5965 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5966 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5969 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5970 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5971 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5972 IEM_MC_END();
5973 return VINF_SUCCESS;
5974}
5975
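/* The effective operand size is passed on to the C implementation for both
   xsave and xrstor, presumably so it can distinguish the REX.W promoted
   XSAVE64/XRSTOR64 forms (64-bit FPU IP/DP image) from the default ones. */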
5976
5977/**
5978 * @opmaps grp15
5979 * @opcode !11/5
5980 * @oppfx none
5981 * @opcpuid xsave
5982 * @opgroup og_system
5983 * @opxcpttype none
5984 */
5985FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
5986{
5987 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
5988 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5989 return IEMOP_RAISE_INVALID_OPCODE();
5990
5991 IEM_MC_BEGIN(3, 0);
5992 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5993 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5994 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5997 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above. */
5998 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5999 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6000 IEM_MC_END();
6001 return VINF_SUCCESS;
6002}
6003
6004/** Opcode 0x0f 0xae mem/6. */
6005FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6006
6007/**
6008 * @opmaps grp15
6009 * @opcode !11/7
6010 * @oppfx none
6011 * @opcpuid clfsh
6012 * @opgroup og_cachectl
6013 * @optest op1=1 ->
6014 */
6015FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6016{
6017 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6018 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6019 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6020
6021 IEM_MC_BEGIN(2, 0);
6022 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6023 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6026 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6027 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6028 IEM_MC_END();
6029 return VINF_SUCCESS;
6030}
6031
6032/**
6033 * @opmaps grp15
6034 * @opcode !11/7
6035 * @oppfx 0x66
6036 * @opcpuid clflushopt
6037 * @opgroup og_cachectl
6038 * @optest op1=1 ->
6039 */
6040FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6041{
6042 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6043 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6044 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6045
6046 IEM_MC_BEGIN(2, 0);
6047 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6048 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6051 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6052 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6053 IEM_MC_END();
6054 return VINF_SUCCESS;
6055}
6056
6057
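/*
 * The three fences below are gated on the guest reporting SSE2, while the
 * emulation only executes the real fence instruction when the host has SSE2
 * too; otherwise iemAImpl_alt_mem_fence supplies a fallback with at least
 * equivalent ordering guarantees.
 */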
6058/** Opcode 0x0f 0xae 11b/5. */
6059FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6060{
6061 RT_NOREF_PV(bRm);
6062 IEMOP_MNEMONIC(lfence, "lfence");
6063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6064 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6065 return IEMOP_RAISE_INVALID_OPCODE();
6066
6067 IEM_MC_BEGIN(0, 0);
6068 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6069 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6070 else
6071 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6072 IEM_MC_ADVANCE_RIP();
6073 IEM_MC_END();
6074 return VINF_SUCCESS;
6075}
6076
6077
6078/** Opcode 0x0f 0xae 11b/6. */
6079FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6080{
6081 RT_NOREF_PV(bRm);
6082 IEMOP_MNEMONIC(mfence, "mfence");
6083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6084 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6085 return IEMOP_RAISE_INVALID_OPCODE();
6086
6087 IEM_MC_BEGIN(0, 0);
6088 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6089 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6090 else
6091 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6092 IEM_MC_ADVANCE_RIP();
6093 IEM_MC_END();
6094 return VINF_SUCCESS;
6095}
6096
6097
6098/** Opcode 0x0f 0xae 11b/7. */
6099FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6100{
6101 RT_NOREF_PV(bRm);
6102 IEMOP_MNEMONIC(sfence, "sfence");
6103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6104 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6105 return IEMOP_RAISE_INVALID_OPCODE();
6106
6107 IEM_MC_BEGIN(0, 0);
6108 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6109 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6110 else
6111 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 return VINF_SUCCESS;
6115}
6116
6117
6118/** Opcode 0xf3 0x0f 0xae 11b/0. */
6119FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6120
6121/** Opcode 0xf3 0x0f 0xae 11b/1. */
6122FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6123
6124/** Opcode 0xf3 0x0f 0xae 11b/2. */
6125FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6126
6127/** Opcode 0xf3 0x0f 0xae 11b/3. */
6128FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6129
6130
6131/**
6132 * Group 15 jump table for register variant.
6133 */
6134IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6135{ /* pfx: none, 066h, 0f3h, 0f2h */
6136 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6137 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6138 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6139 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6140 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6141 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6142 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6143 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6144};
6145AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6146
6147
6148/**
6149 * Group 15 jump table for memory variant.
6150 */
6151IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6152{ /* pfx: none, 066h, 0f3h, 0f2h */
6153 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6154 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6155 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6156 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6157 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6158 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6159 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6160 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6161};
6162AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6163
6164
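/*
 * Dispatch sketch: both tables are indexed by /reg * 4 + idxPrefix, where
 * idxPrefix is 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2 (the
 * column order above). E.g. bRm=0xe8 (mod=3, reg=5) without a prefix picks
 * g_apfnGroup15RegReg[5*4 + 0], i.e. iemOp_Grp15_lfence, while a memory
 * form with reg=2 and no prefix picks iemOp_Grp15_ldmxcsr.
 */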
6165/** Opcode 0x0f 0xae. */
6166FNIEMOP_DEF(iemOp_Grp15)
6167{
6168 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6170 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6171 /* register, register */
6172 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6173 + pVCpu->iem.s.idxPrefix], bRm);
6174 /* memory, register */
6175 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6176 + pVCpu->iem.s.idxPrefix], bRm);
6177}
6178
6179
6180/** Opcode 0x0f 0xaf. */
6181FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6182{
6183 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6184 IEMOP_HLP_MIN_386();
6185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6186 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6187}
6188
6189
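/*
 * CMPXCHG semantics, roughly (byte variant, per the SDM):
 *      if (AL == dst) { ZF = 1; dst = src; }
 *      else           { ZF = 0; AL  = dst; }
 * The memory forms below store the accumulator back unconditionally; the
 * assembly helper only modifies the local copy in the failure case, so the
 * unconditional store is equivalent.
 */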
6190/** Opcode 0x0f 0xb0. */
6191FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6192{
6193 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6194 IEMOP_HLP_MIN_486();
6195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6196
6197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6198 {
6199 IEMOP_HLP_DONE_DECODING();
6200 IEM_MC_BEGIN(4, 0);
6201 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6202 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6203 IEM_MC_ARG(uint8_t, u8Src, 2);
6204 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6205
6206 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6207 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6208 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6209 IEM_MC_REF_EFLAGS(pEFlags);
6210 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6211 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6212 else
6213 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6214
6215 IEM_MC_ADVANCE_RIP();
6216 IEM_MC_END();
6217 }
6218 else
6219 {
6220 IEM_MC_BEGIN(4, 3);
6221 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6222 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6223 IEM_MC_ARG(uint8_t, u8Src, 2);
6224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6226 IEM_MC_LOCAL(uint8_t, u8Al);
6227
6228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6229 IEMOP_HLP_DONE_DECODING();
6230 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6231 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6232 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6235 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6236 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6237 else
6238 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6239
6240 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6241 IEM_MC_COMMIT_EFLAGS(EFlags);
6242 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6243 IEM_MC_ADVANCE_RIP();
6244 IEM_MC_END();
6245 }
6246 return VINF_SUCCESS;
6247}
6248
6249/** Opcode 0x0f 0xb1. */
6250FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6251{
6252 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6253 IEMOP_HLP_MIN_486();
6254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6255
6256 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6257 {
6258 IEMOP_HLP_DONE_DECODING();
6259 switch (pVCpu->iem.s.enmEffOpSize)
6260 {
6261 case IEMMODE_16BIT:
6262 IEM_MC_BEGIN(4, 0);
6263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6264 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6265 IEM_MC_ARG(uint16_t, u16Src, 2);
6266 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6267
6268 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6269 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6270 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6271 IEM_MC_REF_EFLAGS(pEFlags);
6272 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6273 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6274 else
6275 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6276
6277 IEM_MC_ADVANCE_RIP();
6278 IEM_MC_END();
6279 return VINF_SUCCESS;
6280
6281 case IEMMODE_32BIT:
6282 IEM_MC_BEGIN(4, 0);
6283 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6284 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6285 IEM_MC_ARG(uint32_t, u32Src, 2);
6286 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6287
6288 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6289 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6290 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6291 IEM_MC_REF_EFLAGS(pEFlags);
6292 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6293 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6294 else
6295 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6296
6297 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6298 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6299 IEM_MC_ADVANCE_RIP();
6300 IEM_MC_END();
6301 return VINF_SUCCESS;
6302
6303 case IEMMODE_64BIT:
6304 IEM_MC_BEGIN(4, 0);
6305 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6306 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6307#ifdef RT_ARCH_X86
6308 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6309#else
6310 IEM_MC_ARG(uint64_t, u64Src, 2);
6311#endif
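                /* On 32-bit hosts the 64-bit source is passed by reference
                   (RT_ARCH_X86 branch above), presumably because the
                   assembly helper cannot conveniently take a 64-bit value
                   there; otherwise it goes by value. */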
6312 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6313
6314 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6315 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6316 IEM_MC_REF_EFLAGS(pEFlags);
6317#ifdef RT_ARCH_X86
6318 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6319 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6320 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6321 else
6322 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6323#else
6324 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6325 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6326 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6327 else
6328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6329#endif
6330
6331 IEM_MC_ADVANCE_RIP();
6332 IEM_MC_END();
6333 return VINF_SUCCESS;
6334
6335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6336 }
6337 }
6338 else
6339 {
6340 switch (pVCpu->iem.s.enmEffOpSize)
6341 {
6342 case IEMMODE_16BIT:
6343 IEM_MC_BEGIN(4, 3);
6344 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6345 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6346 IEM_MC_ARG(uint16_t, u16Src, 2);
6347 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6349 IEM_MC_LOCAL(uint16_t, u16Ax);
6350
6351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6352 IEMOP_HLP_DONE_DECODING();
6353 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6354 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6355 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6356 IEM_MC_FETCH_EFLAGS(EFlags);
6357 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6358 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6359 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6360 else
6361 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6362
6363 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6364 IEM_MC_COMMIT_EFLAGS(EFlags);
6365 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_32BIT:
6371 IEM_MC_BEGIN(4, 3);
6372 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6373 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6374 IEM_MC_ARG(uint32_t, u32Src, 2);
6375 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6377 IEM_MC_LOCAL(uint32_t, u32Eax);
6378
6379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6380 IEMOP_HLP_DONE_DECODING();
6381 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6382 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6383 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6384 IEM_MC_FETCH_EFLAGS(EFlags);
6385 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6386 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6387 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6388 else
6389 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6390
6391 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6392 IEM_MC_COMMIT_EFLAGS(EFlags);
6393 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6394 IEM_MC_ADVANCE_RIP();
6395 IEM_MC_END();
6396 return VINF_SUCCESS;
6397
6398 case IEMMODE_64BIT:
6399 IEM_MC_BEGIN(4, 3);
6400 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6401 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6402#ifdef RT_ARCH_X86
6403 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6404#else
6405 IEM_MC_ARG(uint64_t, u64Src, 2);
6406#endif
6407 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6409 IEM_MC_LOCAL(uint64_t, u64Rax);
6410
6411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6412 IEMOP_HLP_DONE_DECODING();
6413 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6414 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6415 IEM_MC_FETCH_EFLAGS(EFlags);
6416 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6417#ifdef RT_ARCH_X86
6418 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6419 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6420 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6421 else
6422 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6423#else
6424 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6425 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6426 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6427 else
6428 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6429#endif
6430
6431 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6432 IEM_MC_COMMIT_EFLAGS(EFlags);
6433 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6434 IEM_MC_ADVANCE_RIP();
6435 IEM_MC_END();
6436 return VINF_SUCCESS;
6437
6438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6439 }
6440 }
6441}
6442
6443
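/*
 * Common worker for lss/lfs/lgs: loads a far pointer (Mp) where the offset
 * comes first in memory and the 16-bit selector follows it, i.e. at +2, +4
 * or +8 depending on the operand size, as the _DISP fetches below show.
 */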
6444FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6445{
6446 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6447 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6448
6449 switch (pVCpu->iem.s.enmEffOpSize)
6450 {
6451 case IEMMODE_16BIT:
6452 IEM_MC_BEGIN(5, 1);
6453 IEM_MC_ARG(uint16_t, uSel, 0);
6454 IEM_MC_ARG(uint16_t, offSeg, 1);
6455 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6456 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6457 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6458 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6461 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6462 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6463 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6464 IEM_MC_END();
6465 return VINF_SUCCESS;
6466
6467 case IEMMODE_32BIT:
6468 IEM_MC_BEGIN(5, 1);
6469 IEM_MC_ARG(uint16_t, uSel, 0);
6470 IEM_MC_ARG(uint32_t, offSeg, 1);
6471 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6472 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6473 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6474 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6477 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6478 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6479 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6480 IEM_MC_END();
6481 return VINF_SUCCESS;
6482
6483 case IEMMODE_64BIT:
6484 IEM_MC_BEGIN(5, 1);
6485 IEM_MC_ARG(uint16_t, uSel, 0);
6486 IEM_MC_ARG(uint64_t, offSeg, 1);
6487 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6488 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6489 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6490 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6493 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6494 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6495 else
6496 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6497 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6498 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6499 IEM_MC_END();
6500 return VINF_SUCCESS;
6501
6502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6503 }
6504}
6505
6506
6507/** Opcode 0x0f 0xb2. */
6508FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6509{
6510 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6511 IEMOP_HLP_MIN_386();
6512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6514 return IEMOP_RAISE_INVALID_OPCODE();
6515 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6516}
6517
6518
6519/** Opcode 0x0f 0xb3. */
6520FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6521{
6522 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6523 IEMOP_HLP_MIN_386();
6524 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6525}
6526
6527
6528/** Opcode 0x0f 0xb4. */
6529FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6530{
6531 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6532 IEMOP_HLP_MIN_386();
6533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6535 return IEMOP_RAISE_INVALID_OPCODE();
6536 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6537}
6538
6539
6540/** Opcode 0x0f 0xb5. */
6541FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6542{
6543 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6544 IEMOP_HLP_MIN_386();
6545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6547 return IEMOP_RAISE_INVALID_OPCODE();
6548 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6549}
6550
6551
6552/** Opcode 0x0f 0xb6. */
6553FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6554{
6555 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6556 IEMOP_HLP_MIN_386();
6557
6558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6559
6560 /*
6561 * If rm denotes a register, there are no more instruction bytes.
6562 */
6563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6564 {
6565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6566 switch (pVCpu->iem.s.enmEffOpSize)
6567 {
6568 case IEMMODE_16BIT:
6569 IEM_MC_BEGIN(0, 1);
6570 IEM_MC_LOCAL(uint16_t, u16Value);
6571 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6572 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6573 IEM_MC_ADVANCE_RIP();
6574 IEM_MC_END();
6575 return VINF_SUCCESS;
6576
6577 case IEMMODE_32BIT:
6578 IEM_MC_BEGIN(0, 1);
6579 IEM_MC_LOCAL(uint32_t, u32Value);
6580 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6581 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6582 IEM_MC_ADVANCE_RIP();
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 case IEMMODE_64BIT:
6587 IEM_MC_BEGIN(0, 1);
6588 IEM_MC_LOCAL(uint64_t, u64Value);
6589 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6590 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6591 IEM_MC_ADVANCE_RIP();
6592 IEM_MC_END();
6593 return VINF_SUCCESS;
6594
6595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6596 }
6597 }
6598 else
6599 {
6600 /*
6601 * We're loading a register from memory.
6602 */
6603 switch (pVCpu->iem.s.enmEffOpSize)
6604 {
6605 case IEMMODE_16BIT:
6606 IEM_MC_BEGIN(0, 2);
6607 IEM_MC_LOCAL(uint16_t, u16Value);
6608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6612 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6613 IEM_MC_ADVANCE_RIP();
6614 IEM_MC_END();
6615 return VINF_SUCCESS;
6616
6617 case IEMMODE_32BIT:
6618 IEM_MC_BEGIN(0, 2);
6619 IEM_MC_LOCAL(uint32_t, u32Value);
6620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6624 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6625 IEM_MC_ADVANCE_RIP();
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628
6629 case IEMMODE_64BIT:
6630 IEM_MC_BEGIN(0, 2);
6631 IEM_MC_LOCAL(uint64_t, u64Value);
6632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6635 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6636 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6637 IEM_MC_ADVANCE_RIP();
6638 IEM_MC_END();
6639 return VINF_SUCCESS;
6640
6641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6642 }
6643 }
6644}
6645
6646
6647/** Opcode 0x0f 0xb7. */
6648FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6649{
6650 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6651 IEMOP_HLP_MIN_386();
6652
6653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6654
6655 /** @todo Not entirely sure how the operand size prefix is handled here,
6656 * assuming that it will be ignored. Would be nice to have a few
6657 * tests for this. */
6658 /*
6659 * If rm denotes a register, there are no more instruction bytes.
6660 */
6661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6662 {
6663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6664 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6665 {
6666 IEM_MC_BEGIN(0, 1);
6667 IEM_MC_LOCAL(uint32_t, u32Value);
6668 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6669 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6670 IEM_MC_ADVANCE_RIP();
6671 IEM_MC_END();
6672 }
6673 else
6674 {
6675 IEM_MC_BEGIN(0, 1);
6676 IEM_MC_LOCAL(uint64_t, u64Value);
6677 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6678 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6679 IEM_MC_ADVANCE_RIP();
6680 IEM_MC_END();
6681 }
6682 }
6683 else
6684 {
6685 /*
6686 * We're loading a register from memory.
6687 */
6688 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6689 {
6690 IEM_MC_BEGIN(0, 2);
6691 IEM_MC_LOCAL(uint32_t, u32Value);
6692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6695 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6696 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6697 IEM_MC_ADVANCE_RIP();
6698 IEM_MC_END();
6699 }
6700 else
6701 {
6702 IEM_MC_BEGIN(0, 2);
6703 IEM_MC_LOCAL(uint64_t, u64Value);
6704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6707 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6708 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6709 IEM_MC_ADVANCE_RIP();
6710 IEM_MC_END();
6711 }
6712 }
6713 return VINF_SUCCESS;
6714}
6715
6716
6717/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6718FNIEMOP_UD_STUB(iemOp_jmpe);
6719/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6720FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6721
6722
6723/**
6724 * @opcode 0xb9
6725 * @opinvalid intel-modrm
6726 * @optest ->
6727 */
6728FNIEMOP_DEF(iemOp_Grp10)
6729{
6730 /*
6731 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
6732 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6733 */
6734 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6735 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6736 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6737}
6738
6739
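/*
 * Unlike the Ev,Gv bit ops earlier, the imm8 bit offset is simply masked to
 * the operand width (0x0f/0x1f/0x3f), so the memory forms need no effective
 * address adjustment. /0../3 are invalid, but both vendors still consume
 * the modrm operand bytes and the imm8.
 */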
6740/** Opcode 0x0f 0xba. */
6741FNIEMOP_DEF(iemOp_Grp8)
6742{
6743 IEMOP_HLP_MIN_386();
6744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6745 PCIEMOPBINSIZES pImpl;
6746 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6747 {
6748 case 0: case 1: case 2: case 3:
6749 /* Both AMD and Intel want full modr/m decoding and imm8. */
6750 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6751 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6752 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6753 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6754 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6756 }
6757 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6758
6759 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6760 {
6761 /* register destination. */
6762 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6764
6765 switch (pVCpu->iem.s.enmEffOpSize)
6766 {
6767 case IEMMODE_16BIT:
6768 IEM_MC_BEGIN(3, 0);
6769 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6770 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6771 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6772
6773 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6774 IEM_MC_REF_EFLAGS(pEFlags);
6775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6776
6777 IEM_MC_ADVANCE_RIP();
6778 IEM_MC_END();
6779 return VINF_SUCCESS;
6780
6781 case IEMMODE_32BIT:
6782 IEM_MC_BEGIN(3, 0);
6783 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6784 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6785 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6786
6787 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6788 IEM_MC_REF_EFLAGS(pEFlags);
6789 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6790
6791 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6792 IEM_MC_ADVANCE_RIP();
6793 IEM_MC_END();
6794 return VINF_SUCCESS;
6795
6796 case IEMMODE_64BIT:
6797 IEM_MC_BEGIN(3, 0);
6798 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6799 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6800 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6801
6802 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6803 IEM_MC_REF_EFLAGS(pEFlags);
6804 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6805
6806 IEM_MC_ADVANCE_RIP();
6807 IEM_MC_END();
6808 return VINF_SUCCESS;
6809
6810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6811 }
6812 }
6813 else
6814 {
6815 /* memory destination. */
6816
6817 uint32_t fAccess;
6818 if (pImpl->pfnLockedU16)
6819 fAccess = IEM_ACCESS_DATA_RW;
6820 else /* BT */
6821 fAccess = IEM_ACCESS_DATA_R;
6822
6823 /** @todo test negative bit offsets! */
6824 switch (pVCpu->iem.s.enmEffOpSize)
6825 {
6826 case IEMMODE_16BIT:
6827 IEM_MC_BEGIN(3, 1);
6828 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6829 IEM_MC_ARG(uint16_t, u16Src, 1);
6830 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6832
6833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6834 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6835 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6836 if (pImpl->pfnLockedU16)
6837 IEMOP_HLP_DONE_DECODING();
6838 else
6839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6840 IEM_MC_FETCH_EFLAGS(EFlags);
6841 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6842 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6843 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6844 else
6845 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6846 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6847
6848 IEM_MC_COMMIT_EFLAGS(EFlags);
6849 IEM_MC_ADVANCE_RIP();
6850 IEM_MC_END();
6851 return VINF_SUCCESS;
6852
6853 case IEMMODE_32BIT:
6854 IEM_MC_BEGIN(3, 1);
6855 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6856 IEM_MC_ARG(uint32_t, u32Src, 1);
6857 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6859
6860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6861 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6862 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6863 if (pImpl->pfnLockedU16)
6864 IEMOP_HLP_DONE_DECODING();
6865 else
6866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6867 IEM_MC_FETCH_EFLAGS(EFlags);
6868 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6869 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6870 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6871 else
6872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6873 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6874
6875 IEM_MC_COMMIT_EFLAGS(EFlags);
6876 IEM_MC_ADVANCE_RIP();
6877 IEM_MC_END();
6878 return VINF_SUCCESS;
6879
6880 case IEMMODE_64BIT:
6881 IEM_MC_BEGIN(3, 1);
6882 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6883 IEM_MC_ARG(uint64_t, u64Src, 1);
6884 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6886
6887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6888 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6889 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6890 if (pImpl->pfnLockedU16)
6891 IEMOP_HLP_DONE_DECODING();
6892 else
6893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6894 IEM_MC_FETCH_EFLAGS(EFlags);
6895 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6896 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6897 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6898 else
6899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6900 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6901
6902 IEM_MC_COMMIT_EFLAGS(EFlags);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 return VINF_SUCCESS;
6906
6907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6908 }
6909 }
6910}
6911
6912
6913/** Opcode 0x0f 0xbb. */
6914FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6915{
6916 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6917 IEMOP_HLP_MIN_386();
6918 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6919}
6920
6921
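/*
 * BSF/BSR scan Ev for the lowest/highest set bit and store its index in Gv.
 * If the source is zero, ZF is set and the destination is undefined per
 * Intel (AMD documents it as left unchanged); the remaining flags are
 * undefined either way, hence the verification masks below.
 */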
6922/** Opcode 0x0f 0xbc. */
6923FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6924{
6925 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6926 IEMOP_HLP_MIN_386();
6927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6928 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6929}
6930
6931
6932/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6933FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6934
6935
6936/** Opcode 0x0f 0xbd. */
6937FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6938{
6939 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6940 IEMOP_HLP_MIN_386();
6941 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6942 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6943}
6944
6945
6946/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6947FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6948
6949
6950/** Opcode 0x0f 0xbe. */
6951FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6952{
6953 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6954 IEMOP_HLP_MIN_386();
6955
6956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6957
6958 /*
6959 * If rm denotes a register, there are no more instruction bytes.
6960 */
6961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6962 {
6963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6964 switch (pVCpu->iem.s.enmEffOpSize)
6965 {
6966 case IEMMODE_16BIT:
6967 IEM_MC_BEGIN(0, 1);
6968 IEM_MC_LOCAL(uint16_t, u16Value);
6969 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6970 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6971 IEM_MC_ADVANCE_RIP();
6972 IEM_MC_END();
6973 return VINF_SUCCESS;
6974
6975 case IEMMODE_32BIT:
6976 IEM_MC_BEGIN(0, 1);
6977 IEM_MC_LOCAL(uint32_t, u32Value);
6978 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6979 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6980 IEM_MC_ADVANCE_RIP();
6981 IEM_MC_END();
6982 return VINF_SUCCESS;
6983
6984 case IEMMODE_64BIT:
6985 IEM_MC_BEGIN(0, 1);
6986 IEM_MC_LOCAL(uint64_t, u64Value);
6987 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6988 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 return VINF_SUCCESS;
6992
6993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6994 }
6995 }
6996 else
6997 {
6998 /*
6999 * We're loading a register from memory.
7000 */
7001 switch (pVCpu->iem.s.enmEffOpSize)
7002 {
7003 case IEMMODE_16BIT:
7004 IEM_MC_BEGIN(0, 2);
7005 IEM_MC_LOCAL(uint16_t, u16Value);
7006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7009 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7010 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7011 IEM_MC_ADVANCE_RIP();
7012 IEM_MC_END();
7013 return VINF_SUCCESS;
7014
7015 case IEMMODE_32BIT:
7016 IEM_MC_BEGIN(0, 2);
7017 IEM_MC_LOCAL(uint32_t, u32Value);
7018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7021 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7022 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7023 IEM_MC_ADVANCE_RIP();
7024 IEM_MC_END();
7025 return VINF_SUCCESS;
7026
7027 case IEMMODE_64BIT:
7028 IEM_MC_BEGIN(0, 2);
7029 IEM_MC_LOCAL(uint64_t, u64Value);
7030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7033 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7034 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7035 IEM_MC_ADVANCE_RIP();
7036 IEM_MC_END();
7037 return VINF_SUCCESS;
7038
7039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7040 }
7041 }
7042}
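
/* movsx replicates the source's sign bit into every added destination bit. A
   one-line illustrative sketch of the byte-to-dword widening (made-up name,
   not emulator code): */
#if 0
# include <stdint.h>

static uint32_t sketchMovsxU8ToU32(uint8_t uSrc)
{
    /* Going through the signed types replicates bit 7 upwards. */
    return (uint32_t)(int32_t)(int8_t)uSrc;
}
#endif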
7043
7044
7045/** Opcode 0x0f 0xbf. */
7046FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7047{
7048 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7049 IEMOP_HLP_MIN_386();
7050
7051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7052
7053 /** @todo Not entirely sure how the operand size prefix is handled here,
7054 * assuming that it will be ignored. Would be nice to have a few
7055      *        tests for this. */
7056 /*
7057 * If rm is denoting a register, no more instruction bytes.
7058 */
7059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7060 {
7061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7062 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7063 {
7064 IEM_MC_BEGIN(0, 1);
7065 IEM_MC_LOCAL(uint32_t, u32Value);
7066 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7067 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 }
7071 else
7072 {
7073 IEM_MC_BEGIN(0, 1);
7074 IEM_MC_LOCAL(uint64_t, u64Value);
7075 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7076 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7077 IEM_MC_ADVANCE_RIP();
7078 IEM_MC_END();
7079 }
7080 }
7081 else
7082 {
7083 /*
7084 * We're loading a register from memory.
7085 */
7086 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7087 {
7088 IEM_MC_BEGIN(0, 2);
7089 IEM_MC_LOCAL(uint32_t, u32Value);
7090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7093 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7094 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7095 IEM_MC_ADVANCE_RIP();
7096 IEM_MC_END();
7097 }
7098 else
7099 {
7100 IEM_MC_BEGIN(0, 2);
7101 IEM_MC_LOCAL(uint64_t, u64Value);
7102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7105 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7106 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7107 IEM_MC_ADVANCE_RIP();
7108 IEM_MC_END();
7109 }
7110 }
7111 return VINF_SUCCESS;
7112}
7113
7114
7115/** Opcode 0x0f 0xc0. */
7116FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7117{
7118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7119 IEMOP_HLP_MIN_486();
7120 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7121
7122 /*
7123 * If rm is denoting a register, no more instruction bytes.
7124 */
7125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7126 {
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128
7129 IEM_MC_BEGIN(3, 0);
7130 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7131 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7133
7134 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7135 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7136 IEM_MC_REF_EFLAGS(pEFlags);
7137 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7138
7139 IEM_MC_ADVANCE_RIP();
7140 IEM_MC_END();
7141 }
7142 else
7143 {
7144 /*
7145 * We're accessing memory.
7146 */
7147 IEM_MC_BEGIN(3, 3);
7148 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7149 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7150 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7151 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7153
7154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7155 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7156 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7157 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7158 IEM_MC_FETCH_EFLAGS(EFlags);
7159 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7160 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7161 else
7162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7163
7164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7165 IEM_MC_COMMIT_EFLAGS(EFlags);
7166 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7167 IEM_MC_ADVANCE_RIP();
7168 IEM_MC_END();
7170 }
7171 return VINF_SUCCESS;
7172}
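
/* xadd stores the sum in the destination and the old destination value in the
   source register, which is why the memory form above keeps u8RegCopy around
   and writes it back after the arithmetic. Illustrative sketch (made-up name,
   not emulator code): */
#if 0
# include <stdint.h>

static void sketchXadd8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg; /* the destination receives the sum ... */
    *puReg = uOldDst;          /* ... and the register the old destination. */
}
#endif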
7173
7174
7175/** Opcode 0x0f 0xc1. */
7176FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7177{
7178 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7179 IEMOP_HLP_MIN_486();
7180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7181
7182 /*
7183 * If rm is denoting a register, no more instruction bytes.
7184 */
7185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7186 {
7187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7188
7189 switch (pVCpu->iem.s.enmEffOpSize)
7190 {
7191 case IEMMODE_16BIT:
7192 IEM_MC_BEGIN(3, 0);
7193 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7194 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7195 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7196
7197 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7198 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7199 IEM_MC_REF_EFLAGS(pEFlags);
7200 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7201
7202 IEM_MC_ADVANCE_RIP();
7203 IEM_MC_END();
7204 return VINF_SUCCESS;
7205
7206 case IEMMODE_32BIT:
7207 IEM_MC_BEGIN(3, 0);
7208 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7209 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7210 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7211
7212 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7213 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7214 IEM_MC_REF_EFLAGS(pEFlags);
7215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7216
7217 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7218 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7219 IEM_MC_ADVANCE_RIP();
7220 IEM_MC_END();
7221 return VINF_SUCCESS;
7222
7223 case IEMMODE_64BIT:
7224 IEM_MC_BEGIN(3, 0);
7225 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7226 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7228
7229 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7230 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7231 IEM_MC_REF_EFLAGS(pEFlags);
7232 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7233
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 return VINF_SUCCESS;
7237
7238 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7239 }
7240 }
7241 else
7242 {
7243 /*
7244 * We're accessing memory.
7245 */
7246 switch (pVCpu->iem.s.enmEffOpSize)
7247 {
7248 case IEMMODE_16BIT:
7249 IEM_MC_BEGIN(3, 3);
7250 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7251 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7252 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7253 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7255
7256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7257 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7258 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7259 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7260 IEM_MC_FETCH_EFLAGS(EFlags);
7261 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7262 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7263 else
7264 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7265
7266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7267 IEM_MC_COMMIT_EFLAGS(EFlags);
7268 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7269 IEM_MC_ADVANCE_RIP();
7270 IEM_MC_END();
7271 return VINF_SUCCESS;
7272
7273 case IEMMODE_32BIT:
7274 IEM_MC_BEGIN(3, 3);
7275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7276 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7277 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7278 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7280
7281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7282 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7283 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7284 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7285 IEM_MC_FETCH_EFLAGS(EFlags);
7286 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7287 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7288 else
7289 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7290
7291 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7292 IEM_MC_COMMIT_EFLAGS(EFlags);
7293 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7294 IEM_MC_ADVANCE_RIP();
7295 IEM_MC_END();
7296 return VINF_SUCCESS;
7297
7298 case IEMMODE_64BIT:
7299 IEM_MC_BEGIN(3, 3);
7300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7301 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7302 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7303 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7305
7306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7307 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7308 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7309 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7310 IEM_MC_FETCH_EFLAGS(EFlags);
7311 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7312 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7313 else
7314 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7315
7316 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7317 IEM_MC_COMMIT_EFLAGS(EFlags);
7318 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 return VINF_SUCCESS;
7322
7323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7324 }
7325 }
7326}
7327
7328
7329/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7330FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7331/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7332FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7333/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7334FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7335/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7336FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7337
7338
7339/** Opcode 0x0f 0xc3. */
7340FNIEMOP_DEF(iemOp_movnti_My_Gy)
7341{
7342 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7343
7344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7345
7346 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7347 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7348 {
7349 switch (pVCpu->iem.s.enmEffOpSize)
7350 {
7351 case IEMMODE_32BIT:
7352 IEM_MC_BEGIN(0, 2);
7353 IEM_MC_LOCAL(uint32_t, u32Value);
7354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7355
7356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7358 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7359 return IEMOP_RAISE_INVALID_OPCODE();
7360
7361 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7362 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7363 IEM_MC_ADVANCE_RIP();
7364 IEM_MC_END();
7365 break;
7366
7367 case IEMMODE_64BIT:
7368 IEM_MC_BEGIN(0, 2);
7369 IEM_MC_LOCAL(uint64_t, u64Value);
7370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7371
7372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7375 return IEMOP_RAISE_INVALID_OPCODE();
7376
7377 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7378 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7379 IEM_MC_ADVANCE_RIP();
7380 IEM_MC_END();
7381 break;
7382
7383 case IEMMODE_16BIT:
7384 /** @todo check this form. */
7385 return IEMOP_RAISE_INVALID_OPCODE();
7386 }
7387 }
7388 else
7389 return IEMOP_RAISE_INVALID_OPCODE();
7390 return VINF_SUCCESS;
7391}
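
/* movnti is just an integer store carrying a non-temporal cache hint, which is
   why the emulation above is a plain IEM_MC_STORE_MEM after the SSE2 feature
   gate. On a real host that hint is what the SSE2 streaming-store intrinsic
   expresses; an illustrative sketch (made-up function name): */
#if 0
# include <emmintrin.h>

static void sketchMovnti32(int *piDst, int iValue)
{
    _mm_stream_si32(piDst, iValue); /* store that bypasses the caches */
    _mm_sfence();                   /* order it before later stores if needed */
}
#endif
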
7392/* Opcode 0x66 0x0f 0xc3 - invalid */
7393/* Opcode 0xf3 0x0f 0xc3 - invalid */
7394/* Opcode 0xf2 0x0f 0xc3 - invalid */
7395
7396/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7397FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7398/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7399FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7400/* Opcode 0xf3 0x0f 0xc4 - invalid */
7401/* Opcode 0xf2 0x0f 0xc4 - invalid */
7402
7403/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7404FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7405/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7406FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7407/* Opcode 0xf3 0x0f 0xc5 - invalid */
7408/* Opcode 0xf2 0x0f 0xc5 - invalid */
7409
7410/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7411FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7412/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7413FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7414/* Opcode 0xf3 0x0f 0xc6 - invalid */
7415/* Opcode 0xf2 0x0f 0xc6 - invalid */
7416
7417
7418/** Opcode 0x0f 0xc7 !11/1. */
7419FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7420{
7421 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7422
7423 IEM_MC_BEGIN(4, 3);
7424 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7425 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7426 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7427 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7428 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7429 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7431
7432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7433 IEMOP_HLP_DONE_DECODING();
7434 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7435
7436 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7437 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7438 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7439
7440 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7441 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7442 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7443
7444 IEM_MC_FETCH_EFLAGS(EFlags);
7445 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7447 else
7448 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7449
7450 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7451 IEM_MC_COMMIT_EFLAGS(EFlags);
7452 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7453 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7454 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7455 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7456 IEM_MC_ENDIF();
7457 IEM_MC_ADVANCE_RIP();
7458
7459 IEM_MC_END();
7460 return VINF_SUCCESS;
7461}
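
/* cmpxchg8b compares EDX:EAX against the 64-bit memory operand: on a match it
   stores ECX:EBX and sets ZF, otherwise it loads the operand into EDX:EAX.
   Illustrative sketch of that semantic (made-up name, not emulator code): */
#if 0
# include <stdint.h>

/* Returns the resulting ZF value. */
static int sketchCmpXchg8b(uint64_t *puMem, uint64_t *puEdxEax, uint64_t uEcxEbx)
{
    if (*puMem == *puEdxEax)
    {
        *puMem = uEcxEbx;
        return 1;
    }
    *puEdxEax = *puMem;
    return 0;
}
#endif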
7462
7463
7464/** Opcode REX.W 0x0f 0xc7 !11/1. */
7465FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7466{
7467 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7468 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7469 {
7470#if 0
7471 RT_NOREF(bRm);
7472 IEMOP_BITCH_ABOUT_STUB();
7473 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7474#else
7475 IEM_MC_BEGIN(4, 3);
7476 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7477 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7478 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7479 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7480 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7481 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7483
7484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7485 IEMOP_HLP_DONE_DECODING();
7486 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7487 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7488
7489 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7490 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7491 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7492
7493 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7494 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7495 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7496
7497 IEM_MC_FETCH_EFLAGS(EFlags);
7498# ifdef RT_ARCH_AMD64
7499 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7500 {
7501 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7502 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7503 else
7504 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7505 }
7506 else
7507# endif
7508 {
7509             /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7510                accesses and not at all atomic, which works fine in a uni-CPU guest
7511                configuration (ignoring DMA). If guest SMP is active we have no choice
7512                but to use a rendezvous callback here. Sigh. */
7513 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7514 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7515 else
7516 {
7517 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7518 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7519 }
7520 }
7521
7522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7523 IEM_MC_COMMIT_EFLAGS(EFlags);
7524 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7525 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7526 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7527 IEM_MC_ENDIF();
7528 IEM_MC_ADVANCE_RIP();
7529
7530 IEM_MC_END();
7531 return VINF_SUCCESS;
7532#endif
7533 }
7534 Log(("cmpxchg16b -> #UD\n"));
7535 return IEMOP_RAISE_INVALID_OPCODE();
7536}
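
/* Without host CX16 support a 16-byte compare-exchange cannot be done in one
   atomic instruction; serializing every such access through a single lock
   gives the same observable effect, which is roughly what the all-vCPU
   rendezvous above achieves. Illustrative lock-based sketch (made-up names,
   pthreads assumed; this is not the VMM implementation): */
#if 0
# include <pthread.h>
# include <stdint.h>
# include <string.h>

static pthread_mutex_t g_SketchCx16Lock = PTHREAD_MUTEX_INITIALIZER;

/* Returns the resulting ZF value; every 16-byte access must take the lock. */
static int sketchCmpXchg16b(uint64_t aMem[2], uint64_t aRaxRdx[2], uint64_t const aRbxRcx[2])
{
    pthread_mutex_lock(&g_SketchCx16Lock);
    int const fMatch = aMem[0] == aRaxRdx[0] && aMem[1] == aRaxRdx[1];
    if (fMatch)
        memcpy(aMem, aRbxRcx, 16);   /* match: store RCX:RBX */
    else
        memcpy(aRaxRdx, aMem, 16);   /* mismatch: load operand into RDX:RAX */
    pthread_mutex_unlock(&g_SketchCx16Lock);
    return fMatch;
}
#endif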
7537
7538FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7539{
7540 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7541 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7542 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7543}
7544
7545/** Opcode 0x0f 0xc7 11/6. */
7546FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7547
7548/** Opcode 0x0f 0xc7 !11/6. */
7549FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7550
7551/** Opcode 0x66 0x0f 0xc7 !11/6. */
7552FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7553
7554/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7555FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7556
7557/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7558FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7559
7560/** Opcode 0x0f 0xc7 11/7. */
7561FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7562
7563
7564/**
7565 * Group 9 jump table for register variant.
7566 */
7567IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7568{ /* pfx: none, 066h, 0f3h, 0f2h */
7569 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7570 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7571 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7572 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7573 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7574 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7575 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7576 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7577};
7578AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7579
7580
7581/**
7582 * Group 9 jump table for memory variant.
7583 */
7584IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7585{ /* pfx: none, 066h, 0f3h, 0f2h */
7586 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7587 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7588 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7589 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7590 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7591 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7592 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7593 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7594};
7595AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7596
7597
7598/** Opcode 0x0f 0xc7. */
7599FNIEMOP_DEF(iemOp_Grp9)
7600{
7601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7602 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7603 /* register, register */
7604 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7605 + pVCpu->iem.s.idxPrefix], bRm);
7606 /* memory, register */
7607 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7608 + pVCpu->iem.s.idxPrefix], bRm);
7609}
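
/* Both tables above pack four prefix columns per /reg value, so the dispatch
   index is reg * 4 + prefix column. Illustrative sketch of the computation
   (made-up name; the idxPrefix encoding is taken from the table comments): */
#if 0
# include <stdint.h>

/* idxPrefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
static unsigned sketchGrp9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7; /* ModR/M reg field */
    return iReg * 4 + idxPrefix;
}
#endif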
7610
7611
7612/**
7613 * Common 'bswap register' helper.
7614 */
7615FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7616{
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 switch (pVCpu->iem.s.enmEffOpSize)
7619 {
7620 case IEMMODE_16BIT:
7621 IEM_MC_BEGIN(1, 0);
7622 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7623 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7624 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7625 IEM_MC_ADVANCE_RIP();
7626 IEM_MC_END();
7627 return VINF_SUCCESS;
7628
7629 case IEMMODE_32BIT:
7630 IEM_MC_BEGIN(1, 0);
7631 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7632 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7633 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7634 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7635 IEM_MC_ADVANCE_RIP();
7636 IEM_MC_END();
7637 return VINF_SUCCESS;
7638
7639 case IEMMODE_64BIT:
7640 IEM_MC_BEGIN(1, 0);
7641 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7642 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7643 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7644 IEM_MC_ADVANCE_RIP();
7645 IEM_MC_END();
7646 return VINF_SUCCESS;
7647
7648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7649 }
7650}
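
/* bswap reverses the byte order of the register; the 16-bit form is
   architecturally undefined, which is why the helper above takes the full
   32-bit register reference without clearing anything. Illustrative sketch of
   the 32-bit reversal (made-up name, not emulator code): */
#if 0
# include <stdint.h>

static uint32_t sketchBswap32(uint32_t u)
{
    return  (u >> 24)
         | ((u >>  8) & UINT32_C(0x0000ff00))
         | ((u <<  8) & UINT32_C(0x00ff0000))
         |  (u << 24);
}
#endif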
7651
7652
7653/** Opcode 0x0f 0xc8. */
7654FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7655{
7656 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7657     /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7658        prefix; it appears REX.B is actually the correct prefix. For a parallel
7659        case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7660 IEMOP_HLP_MIN_486();
7661 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7662}
7663
7664
7665/** Opcode 0x0f 0xc9. */
7666FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7667{
7668 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7669 IEMOP_HLP_MIN_486();
7670 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7671}
7672
7673
7674/** Opcode 0x0f 0xca. */
7675FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7676{
7677     IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7678 IEMOP_HLP_MIN_486();
7679 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7680}
7681
7682
7683/** Opcode 0x0f 0xcb. */
7684FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7685{
7686     IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7687 IEMOP_HLP_MIN_486();
7688 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7689}
7690
7691
7692/** Opcode 0x0f 0xcc. */
7693FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7694{
7695 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7696 IEMOP_HLP_MIN_486();
7697 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7698}
7699
7700
7701/** Opcode 0x0f 0xcd. */
7702FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7703{
7704 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7705 IEMOP_HLP_MIN_486();
7706 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7707}
7708
7709
7710/** Opcode 0x0f 0xce. */
7711FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7712{
7713 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7714 IEMOP_HLP_MIN_486();
7715 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7716}
7717
7718
7719/** Opcode 0x0f 0xcf. */
7720FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7721{
7722 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7723 IEMOP_HLP_MIN_486();
7724 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7725}
7726
7727
7728/* Opcode 0x0f 0xd0 - invalid */
7729/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7730FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7731/* Opcode 0xf3 0x0f 0xd0 - invalid */
7732/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7733FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7734
7735/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7736FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7737/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7738FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7739/* Opcode 0xf3 0x0f 0xd1 - invalid */
7740/* Opcode 0xf2 0x0f 0xd1 - invalid */
7741
7742/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7743FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7744/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7745FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7746/* Opcode 0xf3 0x0f 0xd2 - invalid */
7747/* Opcode 0xf2 0x0f 0xd2 - invalid */
7748
7749/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7750FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7751/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7752FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7753/* Opcode 0xf3 0x0f 0xd3 - invalid */
7754/* Opcode 0xf2 0x0f 0xd3 - invalid */
7755
7756/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7757FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7758/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7759FNIEMOP_STUB(iemOp_paddq_Vx_W);
7760/* Opcode 0xf3 0x0f 0xd4 - invalid */
7761/* Opcode 0xf2 0x0f 0xd4 - invalid */
7762
7763/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7764FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7765/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7766FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7767/* Opcode 0xf3 0x0f 0xd5 - invalid */
7768/* Opcode 0xf2 0x0f 0xd5 - invalid */
7769
7770/* Opcode 0x0f 0xd6 - invalid */
7771
7772/**
7773 * @opcode 0xd6
7774 * @oppfx 0x66
7775 * @opcpuid sse2
7776 * @opgroup og_sse2_pcksclr_datamove
7777 * @opxcpttype none
7778 * @optest op1=-1 op2=2 -> op1=2
7779 * @optest op1=0 op2=-42 -> op1=-42
7780 */
7781FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7782{
7783 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7785 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7786 {
7787 /*
7788 * Register, register.
7789 */
7790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7791 IEM_MC_BEGIN(0, 2);
7792 IEM_MC_LOCAL(uint64_t, uSrc);
7793
7794 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7795 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7796
7797 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7798 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7799
7800 IEM_MC_ADVANCE_RIP();
7801 IEM_MC_END();
7802 }
7803 else
7804 {
7805 /*
7806 * Memory, register.
7807 */
7808 IEM_MC_BEGIN(0, 2);
7809 IEM_MC_LOCAL(uint64_t, uSrc);
7810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7811
7812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7815 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7816
7817 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7818 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7819
7820 IEM_MC_ADVANCE_RIP();
7821 IEM_MC_END();
7822 }
7823 return VINF_SUCCESS;
7824}
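
/* For the register form the low quadword is copied and the destination's high
   quadword is zeroed (the ZxReg in the mnemonic above); the memory form is a
   plain 64-bit store. Illustrative sketch (made-up names, not emulator code): */
#if 0
# include <stdint.h>

typedef struct SKETCHXMM { uint64_t au64[2]; } SKETCHXMM;

static void sketchMovqWqVq(SKETCHXMM *pDst, SKETCHXMM const *pSrc)
{
    pDst->au64[0] = pSrc->au64[0]; /* copy the low quadword */
    pDst->au64[1] = 0;             /* zero-extend the rest */
}
#endif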
7825
7826
7827/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7828FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7829/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7830FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7831#if 0
7832FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7833{
7834     /* Docs say register only. */
7835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7836
7837 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7838 {
7839 case IEM_OP_PRF_SIZE_OP: /* SSE */
7840             IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7841 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7842 IEM_MC_BEGIN(2, 0);
7843 IEM_MC_ARG(uint64_t *, pDst, 0);
7844 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7845 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7846 IEM_MC_PREPARE_SSE_USAGE();
7847 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7848 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7849 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7850 IEM_MC_ADVANCE_RIP();
7851 IEM_MC_END();
7852 return VINF_SUCCESS;
7853
7854 case 0: /* MMX */
7855             IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7856 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7857 IEM_MC_BEGIN(2, 0);
7858 IEM_MC_ARG(uint64_t *, pDst, 0);
7859 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7860 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7861 IEM_MC_PREPARE_FPU_USAGE();
7862 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7863 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7864 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7865 IEM_MC_ADVANCE_RIP();
7866 IEM_MC_END();
7867 return VINF_SUCCESS;
7868
7869 default:
7870 return IEMOP_RAISE_INVALID_OPCODE();
7871 }
7872}
7873#endif
7874
7875
7876/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7877FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7878{
7879     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7880 /** @todo testcase: Check that the instruction implicitly clears the high
7881 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7882 * and opcode modifications are made to work with the whole width (not
7883 * just 128). */
7884     IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7885     /* Docs say register only. */
7886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7887 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7888 {
7889 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7890 IEM_MC_BEGIN(2, 0);
7891 IEM_MC_ARG(uint64_t *, pDst, 0);
7892 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7893 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7894 IEM_MC_PREPARE_FPU_USAGE();
7895 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7896 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7897 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7898 IEM_MC_ADVANCE_RIP();
7899 IEM_MC_END();
7900 return VINF_SUCCESS;
7901 }
7902 return IEMOP_RAISE_INVALID_OPCODE();
7903}
7904
7905/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
7906FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7907{
7908     /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7909 /** @todo testcase: Check that the instruction implicitly clears the high
7910 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7911 * and opcode modifications are made to work with the whole width (not
7912 * just 128). */
7913     IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
7914     /* Docs say register only. */
7915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7917 {
7918 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7919 IEM_MC_BEGIN(2, 0);
7920 IEM_MC_ARG(uint64_t *, pDst, 0);
7921 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7922 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7923 IEM_MC_PREPARE_SSE_USAGE();
7924 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7925 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7926 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7927 IEM_MC_ADVANCE_RIP();
7928 IEM_MC_END();
7929 return VINF_SUCCESS;
7930 }
7931 return IEMOP_RAISE_INVALID_OPCODE();
7932}
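
/* pmovmskb gathers the most significant bit of every byte into the low bits
   of the destination GPR, giving a 16-bit mask for the 128-bit form above
   (8-bit for MMX). Illustrative sketch (made-up name, not emulator code): */
#if 0
# include <stdint.h>

static uint32_t sketchPmovmskb128(uint8_t const abSrc[16])
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < 16; i++)
        fMask |= (uint32_t)(abSrc[i] >> 7) << i; /* sign bit of byte i */
    return fMask;
}
#endif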
7933
7934/* Opcode 0xf3 0x0f 0xd7 - invalid */
7935/* Opcode 0xf2 0x0f 0xd7 - invalid */
7936
7937
7938/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7939FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7940/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
7941FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7942/* Opcode 0xf3 0x0f 0xd8 - invalid */
7943/* Opcode 0xf2 0x0f 0xd8 - invalid */
7944
7945/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7946FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7947/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7948FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7949/* Opcode 0xf3 0x0f 0xd9 - invalid */
7950/* Opcode 0xf2 0x0f 0xd9 - invalid */
7951
7952/** Opcode 0x0f 0xda - pminub Pq, Qq */
7953FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7954/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7955FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7956/* Opcode 0xf3 0x0f 0xda - invalid */
7957/* Opcode 0xf2 0x0f 0xda - invalid */
7958
7959/** Opcode 0x0f 0xdb - pand Pq, Qq */
7960FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7961/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
7962FNIEMOP_STUB(iemOp_pand_Vx_W);
7963/* Opcode 0xf3 0x0f 0xdb - invalid */
7964/* Opcode 0xf2 0x0f 0xdb - invalid */
7965
7966/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7967FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7968/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7969FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7970/* Opcode 0xf3 0x0f 0xdc - invalid */
7971/* Opcode 0xf2 0x0f 0xdc - invalid */
7972
7973/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7974FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7975/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7976FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7977/* Opcode 0xf3 0x0f 0xdd - invalid */
7978/* Opcode 0xf2 0x0f 0xdd - invalid */
7979
7980/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7981FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7982/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
7983FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
7984/* Opcode 0xf3 0x0f 0xde - invalid */
7985/* Opcode 0xf2 0x0f 0xde - invalid */
7986
7987/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7988FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7989/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
7990FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
7991/* Opcode 0xf3 0x0f 0xdf - invalid */
7992/* Opcode 0xf2 0x0f 0xdf - invalid */
7993
7994/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7995FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7996/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
7997FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
7998/* Opcode 0xf3 0x0f 0xe0 - invalid */
7999/* Opcode 0xf2 0x0f 0xe0 - invalid */
8000
8001/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8002FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8003/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8004FNIEMOP_STUB(iemOp_psraw_Vx_W);
8005/* Opcode 0xf3 0x0f 0xe1 - invalid */
8006/* Opcode 0xf2 0x0f 0xe1 - invalid */
8007
8008/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8009FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8010/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8011FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8012/* Opcode 0xf3 0x0f 0xe2 - invalid */
8013/* Opcode 0xf2 0x0f 0xe2 - invalid */
8014
8015/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8016FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8017/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8018FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8019/* Opcode 0xf3 0x0f 0xe3 - invalid */
8020/* Opcode 0xf2 0x0f 0xe3 - invalid */
8021
8022/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8023FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8024/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8025FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8026/* Opcode 0xf3 0x0f 0xe4 - invalid */
8027/* Opcode 0xf2 0x0f 0xe4 - invalid */
8028
8029/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8030FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8031/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8032FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8033/* Opcode 0xf3 0x0f 0xe5 - invalid */
8034/* Opcode 0xf2 0x0f 0xe5 - invalid */
8035
8036/* Opcode 0x0f 0xe6 - invalid */
8037/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8038FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8039/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8040FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8041/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8042FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8043
8044
8045/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8046FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8047{
8048 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8050 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8051 {
8052 /* Register, memory. */
8053 IEM_MC_BEGIN(0, 2);
8054 IEM_MC_LOCAL(uint64_t, uSrc);
8055 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8056
8057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8060 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8061
8062 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8063 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8064
8065 IEM_MC_ADVANCE_RIP();
8066 IEM_MC_END();
8067 return VINF_SUCCESS;
8068 }
8069 /* The register, register encoding is invalid. */
8070 return IEMOP_RAISE_INVALID_OPCODE();
8071}
8072
8073/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8074FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8075{
8076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8077 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8078 {
8079 /* Register, memory. */
8080 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8081 IEM_MC_BEGIN(0, 2);
8082 IEM_MC_LOCAL(RTUINT128U, uSrc);
8083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8084
8085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8087 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8089
8090 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8091 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8092
8093 IEM_MC_ADVANCE_RIP();
8094 IEM_MC_END();
8095 return VINF_SUCCESS;
8096 }
8097
8098 /* The register, register encoding is invalid. */
8099 return IEMOP_RAISE_INVALID_OPCODE();
8100}
8101
8102/* Opcode 0xf3 0x0f 0xe7 - invalid */
8103/* Opcode 0xf2 0x0f 0xe7 - invalid */
8104
8105
8106/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8107FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8108/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8109FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8110/* Opcode 0xf3 0x0f 0xe8 - invalid */
8111/* Opcode 0xf2 0x0f 0xe8 - invalid */
8112
8113/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8114FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8115/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8116FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8117/* Opcode 0xf3 0x0f 0xe9 - invalid */
8118/* Opcode 0xf2 0x0f 0xe9 - invalid */
8119
8120/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8121FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8122/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8123FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8124/* Opcode 0xf3 0x0f 0xea - invalid */
8125/* Opcode 0xf2 0x0f 0xea - invalid */
8126
8127/** Opcode 0x0f 0xeb - por Pq, Qq */
8128FNIEMOP_STUB(iemOp_por_Pq_Qq);
8129/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8130FNIEMOP_STUB(iemOp_por_Vx_W);
8131/* Opcode 0xf3 0x0f 0xeb - invalid */
8132/* Opcode 0xf2 0x0f 0xeb - invalid */
8133
8134/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8135FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8136/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8137FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8138/* Opcode 0xf3 0x0f 0xec - invalid */
8139/* Opcode 0xf2 0x0f 0xec - invalid */
8140
8141/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8142FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8143/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8144FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8145/* Opcode 0xf3 0x0f 0xed - invalid */
8146/* Opcode 0xf2 0x0f 0xed - invalid */
8147
8148/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8149FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8150/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8151FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8152/* Opcode 0xf3 0x0f 0xee - invalid */
8153/* Opcode 0xf2 0x0f 0xee - invalid */
8154
8155
8156/** Opcode 0x0f 0xef - pxor Pq, Qq */
8157FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8158{
8159 IEMOP_MNEMONIC(pxor, "pxor");
8160 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8161}
8162
8163/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8164FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8165{
8166 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8167 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8168}
8169
8170/* Opcode 0xf3 0x0f 0xef - invalid */
8171/* Opcode 0xf2 0x0f 0xef - invalid */
8172
8173/* Opcode 0x0f 0xf0 - invalid */
8174/* Opcode 0x66 0x0f 0xf0 - invalid */
8175/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8176FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8177
8178/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8179FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8180/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8181FNIEMOP_STUB(iemOp_psllw_Vx_W);
8182/* Opcode 0xf2 0x0f 0xf1 - invalid */
8183
8184/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8185FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8186/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8187FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8188/* Opcode 0xf2 0x0f 0xf2 - invalid */
8189
8190/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8191FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8192/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8193FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8194/* Opcode 0xf2 0x0f 0xf3 - invalid */
8195
8196/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8197FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8198/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8199FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8200/* Opcode 0xf2 0x0f 0xf4 - invalid */
8201
8202/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8203FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8204/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8205FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8206/* Opcode 0xf2 0x0f 0xf5 - invalid */
8207
8208/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8209FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8210/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8211FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8212/* Opcode 0xf2 0x0f 0xf6 - invalid */
8213
8214/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8215FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8216/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8217FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8218/* Opcode 0xf2 0x0f 0xf7 - invalid */
8219
8220/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8221FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8222/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8223FNIEMOP_STUB(iemOp_psubb_Vx_W);
8224/* Opcode 0xf2 0x0f 0xf8 - invalid */
8225
8226/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8227FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8228/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8229FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8230/* Opcode 0xf2 0x0f 0xf9 - invalid */
8231
8232/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8233FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8234/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8235FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8236/* Opcode 0xf2 0x0f 0xfa - invalid */
8237
8238/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8239FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8240/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8241FNIEMOP_STUB(iemOp_psubq_Vx_W);
8242/* Opcode 0xf2 0x0f 0xfb - invalid */
8243
8244/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8245FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8246/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8247FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8248/* Opcode 0xf2 0x0f 0xfc - invalid */
8249
8250/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8251FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8252/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8253FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8254/* Opcode 0xf2 0x0f 0xfd - invalid */
8255
8256/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8257FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8258/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8259FNIEMOP_STUB(iemOp_paddd_Vx_W);
8260/* Opcode 0xf2 0x0f 0xfe - invalid */
8261
8262
8263/** Opcode **** 0x0f 0xff - UD0 */
8264FNIEMOP_DEF(iemOp_ud0)
8265{
8266 IEMOP_MNEMONIC(ud0, "ud0");
8267 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8268 {
8269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8270#ifndef TST_IEM_CHECK_MC
8271 RTGCPTR GCPtrEff;
8272 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8273 if (rcStrict != VINF_SUCCESS)
8274 return rcStrict;
8275#endif
8276 IEMOP_HLP_DONE_DECODING();
8277 }
8278 return IEMOP_RAISE_INVALID_OPCODE();
8279}
8280
8281
8282
8283/**
8284 * Two byte opcode map, first byte 0x0f.
8285 *
8286 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8287 * check if it needs updating as well when making changes.
8288 */
8289IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8290{
8291 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8292 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8293 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8294 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8295 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8296 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8297 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8298 /* 0x06 */ IEMOP_X4(iemOp_clts),
8299 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8300 /* 0x08 */ IEMOP_X4(iemOp_invd),
8301 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8302 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8303 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8304 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8305 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8306 /* 0x0e */ IEMOP_X4(iemOp_femms),
8307 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8308
8309 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8310 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8311 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8312 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8313 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8314 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8315 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8316 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8317 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8318 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8319 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8320 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8321 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8322 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8323 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8324 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8325
8326 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8327 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8328 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8329 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8330 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8331 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8332 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8333 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8334 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8335 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8336 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8337 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8338 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8339 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8340 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8341 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8342
8343 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8344 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8345 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8346 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8347 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8348 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8349 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8350 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8351 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8352 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8353 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8354 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8355 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8356 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8357 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8358 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8359
8360 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8361 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8362 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8363 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8364 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8365 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8366 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8367 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8368 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8369 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8370 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8371 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8372 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8373 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8374 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8375 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8376
8377 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8378 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8379 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8380 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8381 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8382 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8383 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8384 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8385 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8386 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
/* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
/* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
/* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
/* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
/* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
/* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

/* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

/* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
/* 0x71 */ IEMOP_X4(iemOp_Grp12),
/* 0x72 */ IEMOP_X4(iemOp_Grp13),
/* 0x73 */ IEMOP_X4(iemOp_Grp14),
/* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
/* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
/* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
/* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

/* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
/* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
/* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
/* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
/* 0x84 */ IEMOP_X4(iemOp_je_Jv),
/* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
/* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
/* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
/* 0x88 */ IEMOP_X4(iemOp_js_Jv),
/* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
/* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
/* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
/* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
/* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
/* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
/* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

/* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
/* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
/* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
/* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
/* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
/* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
/* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
/* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
/* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
/* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
/* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
/* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
/* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
/* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
/* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
/* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

/* 0xa0 */ IEMOP_X4(iemOp_push_fs),
/* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
/* 0xa2 */ IEMOP_X4(iemOp_cpuid),
/* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
/* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
/* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
/* 0xa8 */ IEMOP_X4(iemOp_push_gs),
/* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
/* 0xaa */ IEMOP_X4(iemOp_rsm),
/* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
/* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
/* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
/* 0xae */ IEMOP_X4(iemOp_Grp15),
/* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

/* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
/* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
/* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
/* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
/* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
/* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
/* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
/* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
/* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
/* 0xb9 */ IEMOP_X4(iemOp_Grp10),
/* 0xba */ IEMOP_X4(iemOp_Grp8),
/* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
/* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
/* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
/* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
/* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

/* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
/* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
/* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
/* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0xc7 */ IEMOP_X4(iemOp_Grp9),
/* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
/* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
/* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
/* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
/* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
/* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
/* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
/* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

/* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
/* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
/* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
/* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

/* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
/* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
/* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
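
/*
 * Editor's note -- an illustrative sketch, not part of the build: the map
 * holds four entries per opcode byte (256 * 4 == 1024, which is exactly
 * what the AssertCompile above verifies), one column per mandatory-prefix
 * variant in the order: no prefix, 0x66, 0xF3, 0xF2.  A dispatcher along
 * the lines below would then pick the worker with a single table lookup
 * instead of branching on the prefix bytes.  The function name
 * iemOp_2byteEscapeSketch and the decoder-state field idxPrefix are
 * assumptions for illustration here, not necessarily the exact names used
 * elsewhere in IEM.
 */
#if 0 /* illustrative sketch only */
FNIEMOP_DEF_1(iemOp_2byteEscapeSketch, uint8_t, b)
{
    /* Each opcode byte owns four consecutive slots; idxPrefix is assumed
       to be 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2, tracking the last such
       prefix seen (the last one wins, as usual for mandatory prefixes). */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif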